Max level shown:
1 Downloading crates ...
2 Downloaded rand_xorshift v0.3.0
3 Downloaded bit-set v0.5.3
4 Downloaded similar v2.2.1
5 Downloaded subprocess v0.2.9
6 Downloaded quick-error v1.2.3
7 Downloaded wait-timeout v0.2.0
8 Downloaded structmeta v0.2.0
9 Downloaded tokio-test v0.4.2
10 Downloaded unarray v0.1.4
11 Downloaded structmeta-derive v0.2.0
12 Downloaded httptest v0.15.4
13 Downloaded convert_case v0.5.0
14 Downloaded tokio-stream v0.1.8
15 Downloaded test-strategy v0.3.1
16 Downloaded regex-automata v0.1.10
17 Downloaded expectorate v1.0.7
18 Downloaded proptest v1.2.0
19 Downloaded bstr v0.2.17
20 Downloaded bit-vec v0.6.3
21 Downloaded newline-converter v0.3.0
22 Downloaded rusty-fork v0.3.0
23 Compiling libc v0.2.147
24 Compiling proc-macro2 v1.0.63
25 Compiling quote v1.0.29
26 Compiling unicode-ident v1.0.11
27 Compiling cfg-if v1.0.0
28 Compiling autocfg v1.1.0
29 Compiling serde v1.0.167
30 Compiling version_check v0.9.4
31 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libc-0.2.147/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="extra_traits"' --cfg 'feature="std"' -C metadata=b5c3e22808162f2b -C extra-filename=-b5c3e22808162f2b --out-dir /work/oxidecomputer/crucible/target/debug/build/libc-b5c3e22808162f2b -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
32 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proc-macro2-1.0.63/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=9a0ae810f5602a16 -C extra-filename=-9a0ae810f5602a16 --out-dir /work/oxidecomputer/crucible/target/debug/build/proc-macro2-9a0ae810f5602a16 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
33 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/quote-1.0.29/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=e1ca2b57372da817 -C extra-filename=-e1ca2b57372da817 --out-dir /work/oxidecomputer/crucible/target/debug/build/quote-e1ca2b57372da817 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
34 Running `rustc --crate-name unicode_ident --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-ident-1.0.11/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=81e7752fff89e70f -C extra-filename=-81e7752fff89e70f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
35 Running `rustc --crate-name cfg_if --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cfg-if-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=a93276574f0edf39 -C extra-filename=-a93276574f0edf39 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
36 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde-1.0.167/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="rc"' --cfg 'feature="serde_derive"' --cfg 'feature="std"' -C metadata=a519c991a68c99af -C extra-filename=-a519c991a68c99af --out-dir /work/oxidecomputer/crucible/target/debug/build/serde-a519c991a68c99af -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
37 Running `rustc --crate-name autocfg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/autocfg-1.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=b1cec8cc882d2fdd -C extra-filename=-b1cec8cc882d2fdd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
38 Running `rustc --crate-name version_check /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/version_check-0.9.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=23142ed5bf6178a0 -C extra-filename=-23142ed5bf6178a0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
39 Compiling once_cell v1.14.0
40 Running `rustc --crate-name once_cell --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/once_cell-1.14.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="race"' --cfg 'feature="std"' --cfg 'feature="unstable"' -C metadata=fdb5f9e769d1e589 -C extra-filename=-fdb5f9e769d1e589 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
41 Compiling syn v1.0.107
42 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/syn-1.0.107/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="clone-impls"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="extra-traits"' --cfg 'feature="full"' --cfg 'feature="parsing"' --cfg 'feature="printing"' --cfg 'feature="proc-macro"' --cfg 'feature="quote"' --cfg 'feature="visit"' --cfg 'feature="visit-mut"' -C metadata=52840f115412428f -C extra-filename=-52840f115412428f --out-dir /work/oxidecomputer/crucible/target/debug/build/syn-52840f115412428f -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
43 Running `/work/oxidecomputer/crucible/target/debug/build/quote-e1ca2b57372da817/build-script-build`
44 Running `/work/oxidecomputer/crucible/target/debug/build/serde-a519c991a68c99af/build-script-build`
45 Compiling log v0.4.14
46 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/log-0.4.14/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="std"' -C metadata=9aabd99961801a55 -C extra-filename=-9aabd99961801a55 --out-dir /work/oxidecomputer/crucible/target/debug/build/log-9aabd99961801a55 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
47 Running `/work/oxidecomputer/crucible/target/debug/build/proc-macro2-9a0ae810f5602a16/build-script-build`
48 Compiling itoa v1.0.1
49 Running `rustc --crate-name itoa --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/itoa-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f0781104e344570e -C extra-filename=-f0781104e344570e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
50 Running `/work/oxidecomputer/crucible/target/debug/build/libc-b5c3e22808162f2b/build-script-build`
51 Compiling memchr v2.5.0
52 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memchr-2.5.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=39ec00add1e73006 -C extra-filename=-39ec00add1e73006 --out-dir /work/oxidecomputer/crucible/target/debug/build/memchr-39ec00add1e73006 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
53 Compiling libm v0.2.6
54 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libm-0.2.6/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=80351c6c420eb23d -C extra-filename=-80351c6c420eb23d --out-dir /work/oxidecomputer/crucible/target/debug/build/libm-80351c6c420eb23d -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
55 Compiling num-traits v0.2.16
56 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-traits-0.2.16/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="libm"' --cfg 'feature="std"' -C metadata=0ea88a0660a9c2fb -C extra-filename=-0ea88a0660a9c2fb --out-dir /work/oxidecomputer/crucible/target/debug/build/num-traits-0ea88a0660a9c2fb -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
57 Running `rustc --crate-name proc_macro2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proc-macro2-1.0.63/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=a0e1129b171da08c -C extra-filename=-a0e1129b171da08c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow --cfg wrap_proc_macro`
58 Running `rustc --crate-name libc /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libc-0.2.147/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="extra_traits"' --cfg 'feature="std"' -C metadata=a748caf4ceff51bd -C extra-filename=-a748caf4ceff51bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg freebsd11 --cfg libc_priv_mod_use --cfg libc_union --cfg libc_const_size_of --cfg libc_align --cfg libc_int128 --cfg libc_core_cvoid --cfg libc_packedN --cfg libc_cfg_target_vendor --cfg libc_non_exhaustive --cfg libc_long_array --cfg libc_ptr_addr_of --cfg libc_underscore_const_names --cfg libc_const_extern_fn`
59 Running `/work/oxidecomputer/crucible/target/debug/build/syn-52840f115412428f/build-script-build`
60 Compiling scopeguard v1.1.0
61 Running `rustc --crate-name scopeguard /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/scopeguard-1.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=00b7ece4eb7b8e7e -C extra-filename=-00b7ece4eb7b8e7e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
62 Compiling pin-project-lite v0.2.13
63 Running `rustc --crate-name pin_project_lite --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pin-project-lite-0.2.13/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c035e320730303c2 -C extra-filename=-c035e320730303c2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
64 Running `/work/oxidecomputer/crucible/target/debug/build/log-9aabd99961801a55/build-script-build`
65 Running `rustc --crate-name log /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/log-0.4.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=930e200cffaa7930 -C extra-filename=-930e200cffaa7930 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow --cfg atomic_cas --cfg has_atomics`
66 Compiling smallvec v1.10.0
67 Running `rustc --crate-name smallvec --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/smallvec-1.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=397f26bd8c84e528 -C extra-filename=-397f26bd8c84e528 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
68 Running `/work/oxidecomputer/crucible/target/debug/build/num-traits-0ea88a0660a9c2fb/build-script-build`
69 Running `/work/oxidecomputer/crucible/target/debug/build/libm-80351c6c420eb23d/build-script-build`
70 Compiling thiserror v1.0.40
71 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thiserror-1.0.40/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=23fb1ce510a925d0 -C extra-filename=-23fb1ce510a925d0 --out-dir /work/oxidecomputer/crucible/target/debug/build/thiserror-23fb1ce510a925d0 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
72 Running `rustc --crate-name libm --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libm-0.2.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=e5a688f05412e317 -C extra-filename=-e5a688f05412e317 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
73 Running `/work/oxidecomputer/crucible/target/debug/build/memchr-39ec00add1e73006/build-script-build`
74 Running `rustc --crate-name memchr --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memchr-2.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9611c546f9b73cca -C extra-filename=-9611c546f9b73cca --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg memchr_runtime_simd --cfg memchr_runtime_sse2 --cfg memchr_runtime_sse42 --cfg memchr_runtime_avx`
75 Compiling ahash v0.7.6
76 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ahash-0.7.6/./build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=72382e4025e5c1c3 -C extra-filename=-72382e4025e5c1c3 --out-dir /work/oxidecomputer/crucible/target/debug/build/ahash-72382e4025e5c1c3 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
77 Running `rustc --crate-name quote --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/quote-1.0.29/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=2ffbaa80dd156621 -C extra-filename=-2ffbaa80dd156621 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --cap-lints allow`
78 Running `/work/oxidecomputer/crucible/target/debug/build/thiserror-23fb1ce510a925d0/build-script-build`
79 Compiling parking_lot_core v0.9.1
80 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parking_lot_core-0.9.1/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=309b532cd044ced9 -C extra-filename=-309b532cd044ced9 --out-dir /work/oxidecomputer/crucible/target/debug/build/parking_lot_core-309b532cd044ced9 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
81 Compiling getrandom v0.2.5
82 Running `rustc --crate-name getrandom --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/getrandom-0.2.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=567199de146d617e -C extra-filename=-567199de146d617e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
83 Compiling syn v2.0.23
84 Running `rustc --crate-name syn --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/syn-2.0.23/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="clone-impls"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="extra-traits"' --cfg 'feature="full"' --cfg 'feature="parsing"' --cfg 'feature="printing"' --cfg 'feature="proc-macro"' --cfg 'feature="quote"' --cfg 'feature="visit"' --cfg 'feature="visit-mut"' -C metadata=baedf68a9175a6da -C extra-filename=-baedf68a9175a6da --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow`
85 Running `rustc --crate-name syn --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/syn-1.0.107/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="clone-impls"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="extra-traits"' --cfg 'feature="full"' --cfg 'feature="parsing"' --cfg 'feature="printing"' --cfg 'feature="proc-macro"' --cfg 'feature="quote"' --cfg 'feature="visit"' --cfg 'feature="visit-mut"' -C metadata=837f9a049f77ca38 -C extra-filename=-837f9a049f77ca38 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow --cfg syn_disable_nightly_tests`
86 Compiling jobserver v0.1.25
87 Running `rustc --crate-name jobserver --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/jobserver-0.1.25/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=0ee11fba78dd3235 -C extra-filename=-0ee11fba78dd3235 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
88 Running `/work/oxidecomputer/crucible/target/debug/build/parking_lot_core-309b532cd044ced9/build-script-build`
89 Running `/work/oxidecomputer/crucible/target/debug/build/ahash-72382e4025e5c1c3/build-script-build`
90 Compiling num_cpus v1.13.1
91 Running `rustc --crate-name num_cpus /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_cpus-1.13.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=67a451bebfcc5086 -C extra-filename=-67a451bebfcc5086 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
92 Running `rustc --crate-name num_traits --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-traits-0.2.16/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="libm"' --cfg 'feature="std"' -C metadata=8e50de91aba3f8f9 -C extra-filename=-8e50de91aba3f8f9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libm=/work/oxidecomputer/crucible/target/debug/deps/liblibm-e5a688f05412e317.rmeta --cap-lints allow --cfg has_to_int_unchecked --cfg has_reverse_bits --cfg has_leading_trailing_ones --cfg has_div_euclid --cfg has_copysign --cfg has_is_subnormal --cfg has_int_to_from_bytes --cfg has_float_to_from_bytes`
93 Running `rustc --crate-name ahash --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ahash-0.7.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c781b32f1fcd8d92 -C extra-filename=-c781b32f1fcd8d92 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow --cfg 'feature="runtime-rng"' --cfg 'feature="folded_multiply"'`
94 Running `rustc --crate-name parking_lot_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parking_lot_core-0.9.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f45d0642d52c20c1 -C extra-filename=-f45d0642d52c20c1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern smallvec=/work/oxidecomputer/crucible/target/debug/deps/libsmallvec-397f26bd8c84e528.rmeta --cap-lints allow`
95 Compiling lock_api v0.4.6
96 Running `rustc --crate-name lock_api --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/lock_api-0.4.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4b01d37c549347e9 -C extra-filename=-4b01d37c549347e9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern scopeguard=/work/oxidecomputer/crucible/target/debug/deps/libscopeguard-00b7ece4eb7b8e7e.rmeta --cap-lints allow`
97 Compiling cc v1.0.73
98 Running `rustc --crate-name cc --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cc-1.0.73/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="jobserver"' --cfg 'feature="parallel"' -C metadata=2976d4b8f46fa671 -C extra-filename=-2976d4b8f46fa671 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern jobserver=/work/oxidecomputer/crucible/target/debug/deps/libjobserver-0ee11fba78dd3235.rmeta --cap-lints allow`
99 Compiling ryu v1.0.9
100 Compiling futures-core v0.3.28
101 Running `rustc --crate-name ryu --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ryu-1.0.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=062e5ac4087417b3 -C extra-filename=-062e5ac4087417b3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
102 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-core-0.3.28/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=f9a38619a6c6491e -C extra-filename=-f9a38619a6c6491e --out-dir /work/oxidecomputer/crucible/target/debug/build/futures-core-f9a38619a6c6491e -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
103 Compiling parking_lot v0.12.0
104 Running `rustc --crate-name parking_lot --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parking_lot-0.12.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="send_guard"' -C metadata=970d5c0acece447c -C extra-filename=-970d5c0acece447c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lock_api=/work/oxidecomputer/crucible/target/debug/deps/liblock_api-4b01d37c549347e9.rmeta --extern parking_lot_core=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot_core-f45d0642d52c20c1.rmeta --cap-lints allow`
105 Compiling hashbrown v0.12.3
106 Running `rustc --crate-name hashbrown --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashbrown-0.12.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="ahash"' --cfg 'feature="default"' --cfg 'feature="inline-more"' --cfg 'feature="raw"' -C metadata=3ad6614047c487f9 -C extra-filename=-3ad6614047c487f9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ahash=/work/oxidecomputer/crucible/target/debug/deps/libahash-c781b32f1fcd8d92.rmeta --cap-lints allow`
107 Running `/work/oxidecomputer/crucible/target/debug/build/futures-core-f9a38619a6c6491e/build-script-build`
108 Compiling serde_json v1.0.105
109 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_json-1.0.105/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9a7f1bd5890a4c18 -C extra-filename=-9a7f1bd5890a4c18 --out-dir /work/oxidecomputer/crucible/target/debug/build/serde_json-9a7f1bd5890a4c18 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
110 Running `rustc --crate-name futures_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-core-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=46c6e3a1b3966417 -C extra-filename=-46c6e3a1b3966417 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
111 Compiling bitflags v1.3.2
112 Running `rustc --crate-name bitflags --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bitflags-1.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=b5bc15fb96af61fc -C extra-filename=-b5bc15fb96af61fc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
113 Compiling pin-utils v0.1.0
114 Running `rustc --crate-name pin_utils --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pin-utils-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bcfb754cd1ab8c67 -C extra-filename=-bcfb754cd1ab8c67 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
115 Running `/work/oxidecomputer/crucible/target/debug/build/serde_json-9a7f1bd5890a4c18/build-script-build`
116 Compiling pkg-config v0.3.24
117 Running `rustc --crate-name pkg_config /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pkg-config-0.3.24/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=e05c47386f4bdcc0 -C extra-filename=-e05c47386f4bdcc0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
118 Compiling signal-hook-registry v1.4.0
119 Running `rustc --crate-name signal_hook_registry /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-registry-1.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4d955479f235827e -C extra-filename=-4d955479f235827e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
120 Compiling rand_core v0.6.4
121 Running `rustc --crate-name rand_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_core-0.6.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="getrandom"' --cfg 'feature="std"' -C metadata=e2870cc0e01c33c9 -C extra-filename=-e2870cc0e01c33c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --cap-lints allow`
122 Compiling mio v0.8.8
123 Running `rustc --crate-name mio --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mio-0.8.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="net"' --cfg 'feature="os-ext"' --cfg 'feature="os-poll"' -C metadata=27a8136cf12de2bb -C extra-filename=-27a8136cf12de2bb --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --cap-lints allow`
124 Compiling futures-task v0.3.28
125 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-task-0.3.28/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=6662546d276b2bb5 -C extra-filename=-6662546d276b2bb5 --out-dir /work/oxidecomputer/crucible/target/debug/build/futures-task-6662546d276b2bb5 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
126 Compiling futures-sink v0.3.28
127 Running `rustc --crate-name futures_sink --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-sink-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=a6d6ed8a846c5f8a -C extra-filename=-a6d6ed8a846c5f8a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
128 Compiling futures-channel v0.3.28
129 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-channel-0.3.28/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="futures-sink"' --cfg 'feature="sink"' --cfg 'feature="std"' -C metadata=224ac16fb4ca2e60 -C extra-filename=-224ac16fb4ca2e60 --out-dir /work/oxidecomputer/crucible/target/debug/build/futures-channel-224ac16fb4ca2e60 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
130 Compiling fnv v1.0.7
131 Running `rustc --crate-name fnv /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fnv-1.0.7/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=5641130f60a8056b -C extra-filename=-5641130f60a8056b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
132 Running `/work/oxidecomputer/crucible/target/debug/build/futures-task-6662546d276b2bb5/build-script-build`
133 Compiling futures-util v0.3.28
134 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-util-0.3.28/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="async-await"' --cfg 'feature="async-await-macro"' --cfg 'feature="channel"' --cfg 'feature="default"' --cfg 'feature="futures-channel"' --cfg 'feature="futures-io"' --cfg 'feature="futures-macro"' --cfg 'feature="futures-sink"' --cfg 'feature="io"' --cfg 'feature="memchr"' --cfg 'feature="sink"' --cfg 'feature="slab"' --cfg 'feature="std"' -C metadata=5ffe8b784c1e5ae9 -C extra-filename=-5ffe8b784c1e5ae9 --out-dir /work/oxidecomputer/crucible/target/debug/build/futures-util-5ffe8b784c1e5ae9 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
135 Running `rustc --crate-name futures_task --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-task-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=12b58f257ddc96a4 -C extra-filename=-12b58f257ddc96a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
136 Compiling indexmap v1.9.3
137 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indexmap-1.9.3/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="serde"' --cfg 'feature="serde-1"' --cfg 'feature="std"' -C metadata=69a0dfc84d6dbf8d -C extra-filename=-69a0dfc84d6dbf8d --out-dir /work/oxidecomputer/crucible/target/debug/build/indexmap-69a0dfc84d6dbf8d -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
138 Compiling futures-io v0.3.28
139 Running `rustc --crate-name futures_io --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-io-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=bcbbef0c8c581d67 -C extra-filename=-bcbbef0c8c581d67 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
140 Running `/work/oxidecomputer/crucible/target/debug/build/futures-channel-224ac16fb4ca2e60/build-script-build`
141 Running `rustc --crate-name futures_channel --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-channel-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="futures-sink"' --cfg 'feature="sink"' --cfg 'feature="std"' -C metadata=34a7a018f58dc5a7 -C extra-filename=-34a7a018f58dc5a7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --cap-lints allow`
142 Running `/work/oxidecomputer/crucible/target/debug/build/futures-util-5ffe8b784c1e5ae9/build-script-build`
143 Compiling percent-encoding v2.3.0
144 Running `rustc --crate-name percent_encoding --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/percent-encoding-2.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=0000aebce3d30803 -C extra-filename=-0000aebce3d30803 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
145 Compiling slab v0.4.5
146 Running `rustc --crate-name slab --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slab-0.4.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=5b7c79e345d6363e -C extra-filename=-5b7c79e345d6363e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
147 Running `/work/oxidecomputer/crucible/target/debug/build/indexmap-69a0dfc84d6dbf8d/build-script-build`
148 Compiling socket2 v0.5.3
149 Running `rustc --crate-name socket2 --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/socket2-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="all"' -C metadata=b464b617227db85e -C extra-filename=-b464b617227db85e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
150 Compiling time v0.1.44
151 Running `rustc --crate-name time /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-0.1.44/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fcbf6ea26d482f3a -C extra-filename=-fcbf6ea26d482f3a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
152 Compiling iana-time-zone v0.1.47
153 Running `rustc --crate-name iana_time_zone --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/iana-time-zone-0.1.47/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="fallback"' -C metadata=3051bc3a975f54c9 -C extra-filename=-3051bc3a975f54c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
154 Compiling tinyvec_macros v0.1.0
155 Running `rustc --crate-name tinyvec_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tinyvec_macros-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=71472bb1a287b427 -C extra-filename=-71472bb1a287b427 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
156 Compiling tinyvec v1.5.1
157 Running `rustc --crate-name tinyvec --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tinyvec-1.5.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="tinyvec_macros"' -C metadata=592495f429dcf8bd -C extra-filename=-592495f429dcf8bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern tinyvec_macros=/work/oxidecomputer/crucible/target/debug/deps/libtinyvec_macros-71472bb1a287b427.rmeta --cap-lints allow`
158 Compiling unicode-bidi v0.3.7
159 Running `rustc --crate-name unicode_bidi --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-bidi-0.3.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=2c825f5beb05a037 -C extra-filename=-2c825f5beb05a037 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
160 Compiling tracing-core v0.1.30
161 Running `rustc --crate-name tracing_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-core-0.1.30/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="once_cell"' --cfg 'feature="std"' --cfg 'feature="valuable"' -C metadata=adac59f754126e83 -C extra-filename=-adac59f754126e83 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow`
162 Compiling schemars v0.8.12
163 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/schemars-0.8.12/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="bytes"' --cfg 'feature="chrono"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="schemars_derive"' --cfg 'feature="uuid1"' -C metadata=cf4f920476471968 -C extra-filename=-cf4f920476471968 --out-dir /work/oxidecomputer/crucible/target/debug/build/schemars-cf4f920476471968 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
164 Compiling form_urlencoded v1.2.0
165 Running `rustc --crate-name form_urlencoded --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/form_urlencoded-1.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=ef731295a29c9150 -C extra-filename=-ef731295a29c9150 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --cap-lints allow`
166 Compiling ring v0.16.20
167 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ring-0.16.20/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="dev_urandom_fallback"' --cfg 'feature="once_cell"' -C metadata=9674c8d76dda18b5 -C extra-filename=-9674c8d76dda18b5 --out-dir /work/oxidecomputer/crucible/target/debug/build/ring-9674c8d76dda18b5 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --cap-lints allow`
168 Running `/work/oxidecomputer/crucible/target/debug/build/schemars-cf4f920476471968/build-script-build`
169 Compiling dyn-clone v1.0.5
170 Running `rustc --crate-name dyn_clone --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dyn-clone-1.0.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6ca330b50294836a -C extra-filename=-6ca330b50294836a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
171 Compiling matches v0.1.9
172 Running `rustc --crate-name matches /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/matches-0.1.9/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7dd63e8ea065bea3 -C extra-filename=-7dd63e8ea065bea3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
173 Compiling spin v0.5.2
174 Running `rustc --crate-name spin /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/spin-0.5.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bfb6115ad3135235 -C extra-filename=-bfb6115ad3135235 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
175 Compiling untrusted v0.7.1
176 Running `rustc --crate-name untrusted --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/untrusted-0.7.1/src/untrusted.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4b93784238d33e58 -C extra-filename=-4b93784238d33e58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
177 Compiling base64 v0.21.3
178 Running `rustc --crate-name base64 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/base64-0.21.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=c139bdd129e780ac -C extra-filename=-c139bdd129e780ac --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
179 Compiling lazy_static v1.4.0
180 Running `rustc --crate-name lazy_static /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/lazy_static-1.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=9c1a21fa7aad0259 -C extra-filename=-9c1a21fa7aad0259 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
181 Compiling rustls v0.21.6
182 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-0.21.6/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="dangerous_configuration"' --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="logging"' --cfg 'feature="tls12"' -C metadata=0c105edc866f624d -C extra-filename=-0c105edc866f624d --out-dir /work/oxidecomputer/crucible/target/debug/build/rustls-0c105edc866f624d -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
183 Compiling unicode-normalization v0.1.19
184 Running `rustc --crate-name unicode_normalization --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-normalization-0.1.19/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=3df8261a03d4248e -C extra-filename=-3df8261a03d4248e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern tinyvec=/work/oxidecomputer/crucible/target/debug/deps/libtinyvec-592495f429dcf8bd.rmeta --cap-lints allow`
185 Compiling heck v0.4.1
186 Running `rustc --crate-name heck --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/heck-0.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=29af75c938b110f7 -C extra-filename=-29af75c938b110f7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
187 Compiling ppv-lite86 v0.2.16
188 Running `rustc --crate-name ppv_lite86 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ppv-lite86-0.2.16/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="simd"' --cfg 'feature="std"' -C metadata=a7fd0e8d54744928 -C extra-filename=-a7fd0e8d54744928 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
189 Compiling anyhow v1.0.71
190 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anyhow-1.0.71/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=6798eab25f59189e -C extra-filename=-6798eab25f59189e --out-dir /work/oxidecomputer/crucible/target/debug/build/anyhow-6798eab25f59189e -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
191 Running `/work/oxidecomputer/crucible/target/debug/build/ring-9674c8d76dda18b5/build-script-build`
192 Compiling rustls-pemfile v1.0.3
193 Running `rustc --crate-name rustls_pemfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-pemfile-1.0.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e52b2a6debfcae48 -C extra-filename=-e52b2a6debfcae48 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --cap-lints allow`
194 Compiling openssl-sys v0.9.90
195 Running `rustc --crate-name build_script_main --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-sys-0.9.90/build/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=b13a834c02d58f09 -C extra-filename=-b13a834c02d58f09 --out-dir /work/oxidecomputer/crucible/target/debug/build/openssl-sys-b13a834c02d58f09 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --cap-lints allow`
196 Running `/work/oxidecomputer/crucible/target/debug/build/anyhow-6798eab25f59189e/build-script-build`
197 Compiling rand_chacha v0.3.1
198 Running `rustc --crate-name rand_chacha --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_chacha-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=add466c063ef8725 -C extra-filename=-add466c063ef8725 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ppv_lite86=/work/oxidecomputer/crucible/target/debug/deps/libppv_lite86-a7fd0e8d54744928.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --cap-lints allow`
199 Running `rustc --crate-name anyhow --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anyhow-1.0.71/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=c23d7ea2a714731e -C extra-filename=-c23d7ea2a714731e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
200 Compiling httparse v1.8.0
201 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httparse-1.8.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=0ca1fa0577ea1bb6 -C extra-filename=-0ca1fa0577ea1bb6 --out-dir /work/oxidecomputer/crucible/target/debug/build/httparse-0ca1fa0577ea1bb6 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
202 Compiling serde_derive_internals v0.26.0
203 Compiling idna v0.2.3
204 Running `rustc --crate-name serde_derive_internals /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_derive_internals-0.26.0/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=6aea929a774cf30f -C extra-filename=-6aea929a774cf30f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
205 Running `rustc --crate-name idna --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/idna-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=67f12269d91917c9 -C extra-filename=-67f12269d91917c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern matches=/work/oxidecomputer/crucible/target/debug/deps/libmatches-7dd63e8ea065bea3.rmeta --extern unicode_bidi=/work/oxidecomputer/crucible/target/debug/deps/libunicode_bidi-2c825f5beb05a037.rmeta --extern unicode_normalization=/work/oxidecomputer/crucible/target/debug/deps/libunicode_normalization-3df8261a03d4248e.rmeta --cap-lints allow`
206 Compiling rand v0.8.5
207 Running `rustc --crate-name rand --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand-0.8.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="getrandom"' --cfg 'feature="libc"' --cfg 'feature="min_const_gen"' --cfg 'feature="rand_chacha"' --cfg 'feature="small_rng"' --cfg 'feature="std"' --cfg 'feature="std_rng"' -C metadata=1f91a9ea4aed49ee -C extra-filename=-1f91a9ea4aed49ee --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --cap-lints allow`
208 Running `/work/oxidecomputer/crucible/target/debug/build/openssl-sys-b13a834c02d58f09/build-script-main`
209 Running `/work/oxidecomputer/crucible/target/debug/build/httparse-0ca1fa0577ea1bb6/build-script-build`
210 Compiling socket2 v0.4.9
211 Running `rustc --crate-name socket2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/socket2-0.4.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="all"' -C metadata=3c3e3607c1c6d64e -C extra-filename=-3c3e3607c1c6d64e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
212 Compiling try-lock v0.2.3
213 Running `rustc --crate-name try_lock /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/try-lock-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=02baba71598f88d3 -C extra-filename=-02baba71598f88d3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
214 Compiling want v0.3.0
215 Running `rustc --crate-name want --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/want-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5737a0d118420ef7 -C extra-filename=-5737a0d118420ef7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern try_lock=/work/oxidecomputer/crucible/target/debug/deps/libtry_lock-02baba71598f88d3.rmeta --cap-lints allow`
216 Running `rustc --crate-name httparse --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httparse-1.8.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=59406412a39ce707 -C extra-filename=-59406412a39ce707 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg httparse_simd`
217 Compiling url v2.2.2
218 Running `rustc --crate-name url --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/url-2.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ff56943ab9066fdc -C extra-filename=-ff56943ab9066fdc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern idna=/work/oxidecomputer/crucible/target/debug/deps/libidna-67f12269d91917c9.rmeta --extern matches=/work/oxidecomputer/crucible/target/debug/deps/libmatches-7dd63e8ea065bea3.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --cap-lints allow`
219 Compiling tower-service v0.3.1
220 Running `rustc --crate-name tower_service --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tower-service-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=51da71f2ad5117ee -C extra-filename=-51da71f2ad5117ee --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
221 Compiling httpdate v1.0.2
222 Running `rustc --crate-name httpdate --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httpdate-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e3ef82a990113a54 -C extra-filename=-e3ef82a990113a54 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
223 Compiling ahash v0.8.3
224 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ahash-0.8.3/./build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=a145e18de00de996 -C extra-filename=-a145e18de00de996 --out-dir /work/oxidecomputer/crucible/target/debug/build/ahash-a145e18de00de996 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
225 Compiling typenum v1.15.0
226 Running `rustc --crate-name build_script_main --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/typenum-1.15.0/build/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=bfc33f8cedea7839 -C extra-filename=-bfc33f8cedea7839 --out-dir /work/oxidecomputer/crucible/target/debug/build/typenum-bfc33f8cedea7839 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
227 Compiling crossbeam-utils v0.8.8
228 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-utils-0.8.8/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="lazy_static"' --cfg 'feature="std"' -C metadata=bb0b9e93c375ad11 -C extra-filename=-bb0b9e93c375ad11 --out-dir /work/oxidecomputer/crucible/target/debug/build/crossbeam-utils-bb0b9e93c375ad11 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
229 Compiling foreign-types-shared v0.1.1
230 Running `rustc --crate-name foreign_types_shared /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/foreign-types-shared-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=623b4e0ee39a8057 -C extra-filename=-623b4e0ee39a8057 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
231 Compiling openssl v0.10.55
232 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-0.10.55/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=754c33330368d7dd -C extra-filename=-754c33330368d7dd --out-dir /work/oxidecomputer/crucible/target/debug/build/openssl-754c33330368d7dd -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
233 Compiling foreign-types v0.3.2
234 Running `rustc --crate-name foreign_types /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/foreign-types-0.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=07949a1c938aca9c -C extra-filename=-07949a1c938aca9c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern foreign_types_shared=/work/oxidecomputer/crucible/target/debug/deps/libforeign_types_shared-623b4e0ee39a8057.rmeta --cap-lints allow`
235 Running `/work/oxidecomputer/crucible/target/debug/build/ahash-a145e18de00de996/build-script-build`
236 Running `/work/oxidecomputer/crucible/target/debug/build/crossbeam-utils-bb0b9e93c375ad11/build-script-build`
237 Running `rustc --crate-name openssl_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-sys-0.9.90/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fe2ced2b5ac7bf5a -C extra-filename=-fe2ced2b5ac7bf5a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64 -l ssl -l crypto --cfg openssl --cfg 'osslconf="OPENSSL_NO_SSL3_METHOD"' --cfg ossl300 --cfg ossl101 --cfg ossl102 --cfg ossl102f --cfg ossl102h --cfg ossl110 --cfg ossl110f --cfg ossl110g --cfg ossl110h --cfg ossl111 --cfg ossl111b --cfg ossl111c --cfg ossl111d`
238 Compiling generic-array v0.14.5
239 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/generic-array-0.14.5/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="more_lengths"' -C metadata=3cb0d81c60829d14 -C extra-filename=-3cb0d81c60829d14 --out-dir /work/oxidecomputer/crucible/target/debug/build/generic-array-3cb0d81c60829d14 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
240 Compiling native-tls v0.2.11
241 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/native-tls-0.2.11/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=52d6aeab7008e848 -C extra-filename=-52d6aeab7008e848 --out-dir /work/oxidecomputer/crucible/target/debug/build/native-tls-52d6aeab7008e848 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
242 Running `/work/oxidecomputer/crucible/target/debug/build/openssl-754c33330368d7dd/build-script-build`
243 Running `rustc --crate-name ahash --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ahash-0.8.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0c4a58ad1daaf354 -C extra-filename=-0c4a58ad1daaf354 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow --cfg 'feature="folded_multiply"'`
244 Running `/work/oxidecomputer/crucible/target/debug/build/typenum-bfc33f8cedea7839/build-script-main`
245 Running `rustc --crate-name typenum --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/typenum-1.15.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=21543e9c4f362850 -C extra-filename=-21543e9c4f362850 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
246 Running `/work/oxidecomputer/crucible/target/debug/build/generic-array-3cb0d81c60829d14/build-script-build`
247 Running `/work/oxidecomputer/crucible/target/debug/build/native-tls-52d6aeab7008e848/build-script-build`
248 Running `rustc --crate-name crossbeam_utils --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-utils-0.8.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="lazy_static"' --cfg 'feature="std"' -C metadata=a1fb255bfa31483a -C extra-filename=-a1fb255bfa31483a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --cap-lints allow`
249 Compiling openssl-probe v0.1.5
250 Running `rustc --crate-name openssl_probe /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-probe-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ef939aeef8872804 -C extra-filename=-ef939aeef8872804 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
251 Compiling unicode-width v0.1.9
252 Running `rustc --crate-name unicode_width /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-width-0.1.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=87c5262a4c4bb0e9 -C extra-filename=-87c5262a4c4bb0e9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
253 Compiling hashbrown v0.13.2
254 Running `rustc --crate-name hashbrown --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashbrown-0.13.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="ahash"' --cfg 'feature="default"' --cfg 'feature="inline-more"' -C metadata=d4fb045aef0e24c1 -C extra-filename=-d4fb045aef0e24c1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ahash=/work/oxidecomputer/crucible/target/debug/deps/libahash-0c4a58ad1daaf354.rmeta --cap-lints allow`
255 Compiling aho-corasick v1.0.1
256 Running `rustc --crate-name aho_corasick --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aho-corasick-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="perf-literal"' --cfg 'feature="std"' -C metadata=afc99e972f7e39a1 -C extra-filename=-afc99e972f7e39a1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --cap-lints allow`
257 Compiling regex-syntax v0.7.2
258 Running `rustc --crate-name regex_syntax --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-syntax-0.7.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' --cfg 'feature="unicode"' --cfg 'feature="unicode-age"' --cfg 'feature="unicode-bool"' --cfg 'feature="unicode-case"' --cfg 'feature="unicode-gencat"' --cfg 'feature="unicode-perl"' --cfg 'feature="unicode-script"' --cfg 'feature="unicode-segment"' -C metadata=c9e6cb4ff3b1ed69 -C extra-filename=-c9e6cb4ff3b1ed69 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
259 Compiling mime v0.3.16
260 Running `rustc --crate-name mime /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mime-0.3.16/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ac14a9115eddd3c2 -C extra-filename=-ac14a9115eddd3c2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
261 Running `rustc --crate-name generic_array /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/generic-array-0.14.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="more_lengths"' -C metadata=bc31b01978a602e7 -C extra-filename=-bc31b01978a602e7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern typenum=/work/oxidecomputer/crucible/target/debug/deps/libtypenum-21543e9c4f362850.rmeta --cap-lints allow --cfg relaxed_coherence`
262 Compiling encoding_rs v0.8.30
263 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/encoding_rs-0.8.30/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' -C metadata=99449b8c6d70454a -C extra-filename=-99449b8c6d70454a --out-dir /work/oxidecomputer/crucible/target/debug/build/encoding_rs-99449b8c6d70454a -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
264 Compiling byteorder v1.4.3
265 Running `rustc --crate-name byteorder --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/byteorder-1.4.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=1306999913c8e1b3 -C extra-filename=-1306999913c8e1b3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
266 Compiling regress v0.6.0
267 Running `rustc --crate-name regress --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regress-0.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="backend-pikevm"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=10da65958da1c830 -C extra-filename=-10da65958da1c830 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-d4fb045aef0e24c1.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --cap-lints allow`
268 Running `/work/oxidecomputer/crucible/target/debug/build/encoding_rs-99449b8c6d70454a/build-script-build`
269 Running `rustc --crate-name encoding_rs --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/encoding_rs-0.8.30/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' -C metadata=3255048793b3f7a6 -C extra-filename=-3255048793b3f7a6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow`
270 Compiling crossbeam-channel v0.5.3
271 Running `rustc --crate-name crossbeam_channel --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-channel-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="crossbeam-utils"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=19ff6fa486e51c63 -C extra-filename=-19ff6fa486e51c63 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --cap-lints allow`
272 Compiling ipnet v2.4.0
273 Running `rustc --crate-name ipnet /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ipnet-2.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=8b250db103a32779 -C extra-filename=-8b250db103a32779 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
274 Compiling webpki-roots v0.25.2
275 Running `rustc --crate-name webpki_roots --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/webpki-roots-0.25.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=31272bd9a7615638 -C extra-filename=-31272bd9a7615638 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
276 Compiling io-lifetimes v1.0.3
277 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/io-lifetimes-1.0.3/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="close"' --cfg 'feature="libc"' --cfg 'feature="windows-sys"' -C metadata=e14a4ad36d121892 -C extra-filename=-e14a4ad36d121892 --out-dir /work/oxidecomputer/crucible/target/debug/build/io-lifetimes-e14a4ad36d121892 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
278 Running `/work/oxidecomputer/crucible/target/debug/build/io-lifetimes-e14a4ad36d121892/build-script-build`
279 Compiling regex v1.8.3
280 Running `rustc --crate-name regex --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-1.8.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="aho-corasick"' --cfg 'feature="default"' --cfg 'feature="memchr"' --cfg 'feature="perf"' --cfg 'feature="perf-cache"' --cfg 'feature="perf-dfa"' --cfg 'feature="perf-inline"' --cfg 'feature="perf-literal"' --cfg 'feature="std"' --cfg 'feature="unicode"' --cfg 'feature="unicode-age"' --cfg 'feature="unicode-bool"' --cfg 'feature="unicode-case"' --cfg 'feature="unicode-gencat"' --cfg 'feature="unicode-perl"' --cfg 'feature="unicode-script"' --cfg 'feature="unicode-segment"' -C metadata=f9e3a4eb3da387ce -C extra-filename=-f9e3a4eb3da387ce --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aho_corasick=/work/oxidecomputer/crucible/target/debug/deps/libaho_corasick-afc99e972f7e39a1.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern regex_syntax=/work/oxidecomputer/crucible/target/debug/deps/libregex_syntax-c9e6cb4ff3b1ed69.rmeta --cap-lints allow`
281 Compiling crypto-common v0.1.6
282 Running `rustc --crate-name crypto_common --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crypto-common-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="getrandom"' --cfg 'feature="rand_core"' --cfg 'feature="std"' -C metadata=0953bfc5dcef84b9 -C extra-filename=-0953bfc5dcef84b9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --extern typenum=/work/oxidecomputer/crucible/target/debug/deps/libtypenum-21543e9c4f362850.rmeta --cap-lints allow`
283 Running `rustc --crate-name io_lifetimes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/io-lifetimes-1.0.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="close"' --cfg 'feature="libc"' --cfg 'feature="windows-sys"' -C metadata=df7ee936a2a5cbac -C extra-filename=-df7ee936a2a5cbac --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg io_safety_is_in_std --cfg panic_in_const_fn`
284 Compiling getopts v0.2.21
285 Running `rustc --crate-name getopts /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/getopts-0.2.21/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=93a8419d37acce69 -C extra-filename=-93a8419d37acce69 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
286 Compiling unsafe-libyaml v0.2.5
287 Running `rustc --crate-name unsafe_libyaml --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unsafe-libyaml-0.2.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=18c99c193ab0b0f5 -C extra-filename=-18c99c193ab0b0f5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
288 Compiling serde_derive v1.0.167
289 Running `rustc --crate-name serde_derive /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_derive-1.0.167/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="default"' -C metadata=3441a1f9756a6d5b -C extra-filename=-3441a1f9756a6d5b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
290 Compiling thiserror-impl v1.0.40
291 Running `rustc --crate-name thiserror_impl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thiserror-impl-1.0.40/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=bf767c319ff2d238 -C extra-filename=-bf767c319ff2d238 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
292 Compiling futures-macro v0.3.28
293 Running `rustc --crate-name futures_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-macro-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=81cbf12cf17c7f91 -C extra-filename=-81cbf12cf17c7f91 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
294 Compiling tokio-macros v2.1.0
295 Running `rustc --crate-name tokio_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-macros-2.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=129d27199cb07668 -C extra-filename=-129d27199cb07668 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
296 Compiling schemars_derive v0.8.12
297 Running `rustc --crate-name schemars_derive --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/schemars_derive-0.8.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=ca659e99c5534de4 -C extra-filename=-ca659e99c5534de4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde_derive_internals=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive_internals-6aea929a774cf30f.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
298 Compiling tracing-attributes v0.1.23
299 Running `rustc --crate-name tracing_attributes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-attributes-0.1.23/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=af60b1f7cb0d953c -C extra-filename=-af60b1f7cb0d953c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
300 Compiling openssl-macros v0.1.0
301 Running `rustc --crate-name openssl_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-macros-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=57d0261583d18db5 -C extra-filename=-57d0261583d18db5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
302 Compiling ucd-trie v0.1.3
303 Running `rustc --crate-name ucd_trie --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ucd-trie-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=a4e8188b5963a3f1 -C extra-filename=-a4e8188b5963a3f1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
304 Running `rustc --crate-name futures_util --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-util-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="async-await"' --cfg 'feature="async-await-macro"' --cfg 'feature="channel"' --cfg 'feature="default"' --cfg 'feature="futures-channel"' --cfg 'feature="futures-io"' --cfg 'feature="futures-macro"' --cfg 'feature="futures-sink"' --cfg 'feature="io"' --cfg 'feature="memchr"' --cfg 'feature="sink"' --cfg 'feature="slab"' --cfg 'feature="std"' -C metadata=b4da5d5433271d56 -C extra-filename=-b4da5d5433271d56 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_io=/work/oxidecomputer/crucible/target/debug/deps/libfutures_io-bcbbef0c8c581d67.rmeta --extern futures_macro=/work/oxidecomputer/crucible/target/debug/deps/libfutures_macro-81cbf12cf17c7f91.so --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern futures_task=/work/oxidecomputer/crucible/target/debug/deps/libfutures_task-12b58f257ddc96a4.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern pin_utils=/work/oxidecomputer/crucible/target/debug/deps/libpin_utils-bcfb754cd1ab8c67.rmeta --extern slab=/work/oxidecomputer/crucible/target/debug/deps/libslab-5b7c79e345d6363e.rmeta --cap-lints allow`
305 Running `rustc --crate-name thiserror --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thiserror-1.0.40/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=07eca56f531a0e5d -C extra-filename=-07eca56f531a0e5d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern thiserror_impl=/work/oxidecomputer/crucible/target/debug/deps/libthiserror_impl-bf767c319ff2d238.so --cap-lints allow`
306 Running `rustc --crate-name openssl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-0.10.55/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=4a7553c915defdd5 -C extra-filename=-4a7553c915defdd5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern foreign_types=/work/oxidecomputer/crucible/target/debug/deps/libforeign_types-07949a1c938aca9c.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern openssl_macros=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_macros-57d0261583d18db5.so --extern ffi=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_sys-fe2ced2b5ac7bf5a.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64 --cfg 'osslconf="OPENSSL_NO_SSL3_METHOD"' --cfg ossl101 --cfg ossl102 --cfg ossl110 --cfg ossl110g --cfg ossl110h --cfg ossl111 --cfg ossl300`
307 Compiling pest v2.1.3
308 Running `rustc --crate-name pest /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest-2.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e1aabf8a6c7bc1ff -C extra-filename=-e1aabf8a6c7bc1ff --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ucd_trie=/work/oxidecomputer/crucible/target/debug/deps/libucd_trie-a4e8188b5963a3f1.rmeta --cap-lints allow`
309 Compiling unicode-xid v0.2.2
310 Running `rustc --crate-name unicode_xid /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-xid-0.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=911d92403f8fb404 -C extra-filename=-911d92403f8fb404 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
311 Compiling maplit v1.0.2
312 Running `rustc --crate-name maplit /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/maplit-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=695e3e42e8316fa9 -C extra-filename=-695e3e42e8316fa9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
313 Compiling synstructure v0.12.6
314 Running `rustc --crate-name synstructure --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/synstructure-0.12.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=48a8bcebf08faced -C extra-filename=-48a8bcebf08faced --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --extern unicode_xid=/work/oxidecomputer/crucible/target/debug/deps/libunicode_xid-911d92403f8fb404.rmeta --cap-lints allow`
315 Compiling slog v2.7.0
316 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-2.7.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="dynamic-keys"' --cfg 'feature="max_level_trace"' --cfg 'feature="release_max_level_debug"' --cfg 'feature="std"' -C metadata=125bc9f2670a64c3 -C extra-filename=-125bc9f2670a64c3 --out-dir /work/oxidecomputer/crucible/target/debug/build/slog-125bc9f2670a64c3 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
317 Compiling pest_meta v2.1.3
318 Running `rustc --crate-name pest_meta /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest_meta-2.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=74e8955a7d734149 -C extra-filename=-74e8955a7d734149 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern maplit=/work/oxidecomputer/crucible/target/debug/deps/libmaplit-695e3e42e8316fa9.rmeta --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rmeta --cap-lints allow`
319 Compiling time-core v0.1.1
320 Running `rustc --crate-name time_core --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-core-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fd216cb66fe61cb9 -C extra-filename=-fd216cb66fe61cb9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
321 Compiling subtle v2.4.1
322 Running `rustc --crate-name subtle /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/subtle-2.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ee0c8d46ce57336b -C extra-filename=-ee0c8d46ce57336b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
323 Compiling tracing v0.1.37
324 Running `rustc --crate-name tracing --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-0.1.37/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="attributes"' --cfg 'feature="default"' --cfg 'feature="std"' --cfg 'feature="tracing-attributes"' -C metadata=1de351a7f2b0ab55 -C extra-filename=-1de351a7f2b0ab55 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern tracing_attributes=/work/oxidecomputer/crucible/target/debug/deps/libtracing_attributes-af60b1f7cb0d953c.so --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --cap-lints allow`
325 Compiling rustversion v1.0.14
326 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustversion-1.0.14/build/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=417212d646cdbd4e -C extra-filename=-417212d646cdbd4e --out-dir /work/oxidecomputer/crucible/target/debug/build/rustversion-417212d646cdbd4e -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
327 Compiling equivalent v1.0.0
328 Running `rustc --crate-name equivalent /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/equivalent-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7a40718821784752 -C extra-filename=-7a40718821784752 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
329 Compiling hashbrown v0.14.0
330 Running `rustc --crate-name hashbrown --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashbrown-0.14.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="raw"' -C metadata=615db2aaa1e4d335 -C extra-filename=-615db2aaa1e4d335 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
331 Compiling zerocopy-derive v0.2.0
332 Running `rustc --crate-name zerocopy_derive --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/zerocopy-derive-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=f3f98c5abdba899d -C extra-filename=-f3f98c5abdba899d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern synstructure=/work/oxidecomputer/crucible/target/debug/deps/libsynstructure-48a8bcebf08faced.rlib --extern proc_macro --cap-lints allow`
333 Compiling time-macros v0.2.10
334 Running `rustc --crate-name time_macros --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-macros-0.2.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="formatting"' --cfg 'feature="parsing"' -C metadata=e07155b8a4270ad7 -C extra-filename=-e07155b8a4270ad7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern time_core=/work/oxidecomputer/crucible/target/debug/deps/libtime_core-fd216cb66fe61cb9.rlib --extern proc_macro --cap-lints allow`
335 Running `/work/oxidecomputer/crucible/target/debug/build/rustversion-417212d646cdbd4e/build-script-build`
336 Compiling pest_generator v2.1.3
337 Running `rustc --crate-name pest_generator /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest_generator-2.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=01755500ddb0705d -C extra-filename=-01755500ddb0705d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rmeta --extern pest_meta=/work/oxidecomputer/crucible/target/debug/deps/libpest_meta-74e8955a7d734149.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
338 Compiling indexmap v2.0.0
339 Running `rustc --crate-name indexmap --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indexmap-2.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=32a62b7926f710bd -C extra-filename=-32a62b7926f710bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern equivalent=/work/oxidecomputer/crucible/target/debug/deps/libequivalent-7a40718821784752.rmeta --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-615db2aaa1e4d335.rmeta --cap-lints allow`
340 Running `/work/oxidecomputer/crucible/target/debug/build/slog-125bc9f2670a64c3/build-script-build`
341 Compiling num_threads v0.1.5
342 Running `rustc --crate-name num_threads /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_threads-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4e84d104f1db9110 -C extra-filename=-4e84d104f1db9110 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
343 Compiling winnow v0.4.6
344 Running `rustc --crate-name winnow --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/winnow-0.4.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=e34c187c773d92ef -C extra-filename=-e34c187c773d92ef --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
345 Compiling rustix v0.37.7
346 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.37.7/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="fs"' --cfg 'feature="io-lifetimes"' --cfg 'feature="libc"' --cfg 'feature="std"' --cfg 'feature="use-libc-auxv"' -C metadata=b6284316c7951b5c -C extra-filename=-b6284316c7951b5c --out-dir /work/oxidecomputer/crucible/target/debug/build/rustix-b6284316c7951b5c -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
347 Running `rustc --crate-name slog /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-2.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="dynamic-keys"' --cfg 'feature="max_level_trace"' --cfg 'feature="release_max_level_debug"' --cfg 'feature="std"' -C metadata=84fd25666c3c26ee -C extra-filename=-84fd25666c3c26ee --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg integer128`
348 Running `rustc --crate-name serde /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde-1.0.167/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="rc"' --cfg 'feature="serde_derive"' --cfg 'feature="std"' -C metadata=2779165e31567af2 -C extra-filename=-2779165e31567af2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --cap-lints allow`
349 Compiling pest_derive v2.1.0
350 Running `rustc --crate-name pest_derive /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest_derive-2.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=8b18b291ae7a4e87 -C extra-filename=-8b18b291ae7a4e87 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rlib --extern pest_generator=/work/oxidecomputer/crucible/target/debug/deps/libpest_generator-01755500ddb0705d.rlib --extern proc_macro --cap-lints allow`
351 Compiling futures-executor v0.3.28
352 Running `rustc --crate-name futures_executor --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-executor-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=61f53162107ffb32 -C extra-filename=-61f53162107ffb32 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_task=/work/oxidecomputer/crucible/target/debug/deps/libfutures_task-12b58f257ddc96a4.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --cap-lints allow`
353 Running `rustc --crate-name native_tls /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/native-tls-0.2.11/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=320c05ab5bbd33c9 -C extra-filename=-320c05ab5bbd33c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern openssl=/work/oxidecomputer/crucible/target/debug/deps/libopenssl-4a7553c915defdd5.rmeta --extern openssl_probe=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_probe-ef939aeef8872804.rmeta --extern openssl_sys=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_sys-fe2ced2b5ac7bf5a.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64 --cfg have_min_max_version`
354 Compiling time v0.3.23
355 Running `rustc --crate-name time --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-0.3.23/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="formatting"' --cfg 'feature="local-offset"' --cfg 'feature="macros"' --cfg 'feature="parsing"' --cfg 'feature="std"' -C metadata=9b604407a0d52f86 -C extra-filename=-9b604407a0d52f86 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern num_threads=/work/oxidecomputer/crucible/target/debug/deps/libnum_threads-4e84d104f1db9110.rmeta --extern time_core=/work/oxidecomputer/crucible/target/debug/deps/libtime_core-fd216cb66fe61cb9.rmeta --extern time_macros=/work/oxidecomputer/crucible/target/debug/deps/libtime_macros-e07155b8a4270ad7.so --cap-lints allow`
356 Running `/work/oxidecomputer/crucible/target/debug/build/rustix-b6284316c7951b5c/build-script-build`
357 Compiling zerocopy v0.3.0
358 Running `rustc --crate-name zerocopy --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/zerocopy-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0b893e01c09e6e03 -C extra-filename=-0b893e01c09e6e03 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern zerocopy_derive=/work/oxidecomputer/crucible/target/debug/deps/libzerocopy_derive-f3f98c5abdba899d.so --cap-lints allow`
359 Running `rustc --crate-name rustversion --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustversion-1.0.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=ae2ca065d7ce77a2 -C extra-filename=-ae2ca065d7ce77a2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro --cap-lints allow`
360 Compiling errno v0.3.1
361 Running `rustc --crate-name errno --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/errno-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e243723ea5172a32 -C extra-filename=-e243723ea5172a32 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
362 Compiling usdt-impl v0.3.5
363 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-impl-0.3.5/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="asm"' -C metadata=d6a2bc3071c67fed -C extra-filename=-d6a2bc3071c67fed --out-dir /work/oxidecomputer/crucible/target/debug/build/usdt-impl-d6a2bc3071c67fed -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
364 Compiling semver v1.0.18
365 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/semver-1.0.18/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=6244f63ae51a7b8f -C extra-filename=-6244f63ae51a7b8f --out-dir /work/oxidecomputer/crucible/target/debug/build/semver-6244f63ae51a7b8f -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
366 Compiling async-trait v0.1.73
367 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-trait-0.1.73/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=72eabf91258cb242 -C extra-filename=-72eabf91258cb242 --out-dir /work/oxidecomputer/crucible/target/debug/build/async-trait-72eabf91258cb242 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
368 Running `/work/oxidecomputer/crucible/target/debug/build/usdt-impl-d6a2bc3071c67fed/build-script-build`
369 Running `/work/oxidecomputer/crucible/target/debug/build/semver-6244f63ae51a7b8f/build-script-build`
370 Compiling dof v0.1.5
371 Running `rustc --crate-name dof --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dof-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=48f06b38719b0295 -C extra-filename=-48f06b38719b0295 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern zerocopy=/work/oxidecomputer/crucible/target/debug/deps/libzerocopy-0b893e01c09e6e03.rmeta --cap-lints allow`
372 Running `rustc --crate-name rustix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.37.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="fs"' --cfg 'feature="io-lifetimes"' --cfg 'feature="libc"' --cfg 'feature="std"' --cfg 'feature="use-libc-auxv"' -C metadata=4f0213bb214bbfd6 -C extra-filename=-4f0213bb214bbfd6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc_errno=/work/oxidecomputer/crucible/target/debug/deps/liberrno-e243723ea5172a32.rmeta --extern io_lifetimes=/work/oxidecomputer/crucible/target/debug/deps/libio_lifetimes-df7ee936a2a5cbac.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg libc --cfg solarish`
373 Running `/work/oxidecomputer/crucible/target/debug/build/async-trait-72eabf91258cb242/build-script-build`
374 Compiling dtrace-parser v0.1.14
375 Running `rustc --crate-name dtrace_parser --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dtrace-parser-0.1.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=647a421ab06e4ff3 -C extra-filename=-647a421ab06e4ff3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rmeta --extern pest_derive=/work/oxidecomputer/crucible/target/debug/deps/libpest_derive-8b18b291ae7a4e87.so --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
376 Compiling thread-id v4.0.0
377 Running `rustc --crate-name thread_id /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thread-id-4.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=eaa0d9ff93152533 -C extra-filename=-eaa0d9ff93152533 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
378 Compiling fastrand v1.7.0
379 Running `rustc --crate-name fastrand --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fastrand-1.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=d1e60a5c45d284ad -C extra-filename=-d1e60a5c45d284ad --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
380 Running `rustc --crate-name async_trait --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-trait-0.1.73/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=a300e84178ee0ad1 -C extra-filename=-a300e84178ee0ad1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
381 Compiling block-buffer v0.10.2
382 Running `rustc --crate-name block_buffer --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/block-buffer-0.10.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0de17b0eee43f62e -C extra-filename=-0de17b0eee43f62e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --cap-lints allow`
383 Compiling thread_local v1.1.4
384 Running `rustc --crate-name thread_local --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thread_local-1.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=771d112d9219dc9f -C extra-filename=-771d112d9219dc9f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow`
385 Compiling strsim v0.10.0
386 Running `rustc --crate-name strsim /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strsim-0.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3314ed6444b5bb69 -C extra-filename=-3314ed6444b5bb69 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
387 Compiling digest v0.10.6
388 Running `rustc --crate-name digest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/digest-0.10.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="block-buffer"' --cfg 'feature="core-api"' --cfg 'feature="default"' --cfg 'feature="mac"' --cfg 'feature="std"' --cfg 'feature="subtle"' -C metadata=d0d34473efe105d1 -C extra-filename=-d0d34473efe105d1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern block_buffer=/work/oxidecomputer/crucible/target/debug/deps/libblock_buffer-0de17b0eee43f62e.rmeta --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --cap-lints allow`
389 Compiling tempfile v3.5.0
390 Running `rustc --crate-name tempfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tempfile-3.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=05cb6a5f7e86cb66 -C extra-filename=-05cb6a5f7e86cb66 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern fastrand=/work/oxidecomputer/crucible/target/debug/deps/libfastrand-d1e60a5c45d284ad.rmeta --extern rustix=/work/oxidecomputer/crucible/target/debug/deps/librustix-4f0213bb214bbfd6.rmeta --cap-lints allow`
391 Compiling dirs-sys-next v0.1.2
392 Running `rustc --crate-name dirs_sys_next --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-sys-next-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4419c59e785c767e -C extra-filename=-4419c59e785c767e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
393 Compiling dirs-next v2.0.0
394 Running `rustc --crate-name dirs_next --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-next-2.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=369b0b19f2fe8c2c -C extra-filename=-369b0b19f2fe8c2c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern dirs_sys_next=/work/oxidecomputer/crucible/target/debug/deps/libdirs_sys_next-4419c59e785c767e.rmeta --cap-lints allow`
395 Compiling libz-sys v1.1.8
396 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libz-sys-1.1.8/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="libc"' -C metadata=664a34d733156495 -C extra-filename=-664a34d733156495 --out-dir /work/oxidecomputer/crucible/target/debug/build/libz-sys-664a34d733156495 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --cap-lints allow`
397 Compiling siphasher v0.3.10
398 Running `rustc --crate-name siphasher --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/siphasher-0.3.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=2eb27360f66646f3 -C extra-filename=-2eb27360f66646f3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
399 Compiling match_cfg v0.1.0
400 Running `rustc --crate-name match_cfg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/match_cfg-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_core"' -C metadata=a8964d97fe20de7b -C extra-filename=-a8964d97fe20de7b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
401 Compiling slog-async v2.8.0
402 Compiling fallible-iterator v0.2.0
403 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-async-2.8.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=128f43cfd1e1203b -C extra-filename=-128f43cfd1e1203b --out-dir /work/oxidecomputer/crucible/target/debug/build/slog-async-128f43cfd1e1203b -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
404 Running `rustc --crate-name fallible_iterator --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fallible-iterator-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=2f475e2aff163c98 -C extra-filename=-2f475e2aff163c98 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
405 Compiling hostname v0.3.1
406 Running `rustc --crate-name hostname /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hostname-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=4a0f8b1a56e5681a -C extra-filename=-4a0f8b1a56e5681a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern match_cfg=/work/oxidecomputer/crucible/target/debug/deps/libmatch_cfg-a8964d97fe20de7b.rmeta --cap-lints allow`
407 Compiling term v0.7.0
408 Running `rustc --crate-name term --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/term-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=9ecdb763f4f627bf -C extra-filename=-9ecdb763f4f627bf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dirs_next=/work/oxidecomputer/crucible/target/debug/deps/libdirs_next-369b0b19f2fe8c2c.rmeta --cap-lints allow`
409 Running `/work/oxidecomputer/crucible/target/debug/build/libz-sys-664a34d733156495/build-script-build`
410 Compiling futures v0.3.28
411 Running `rustc --crate-name futures --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="async-await"' --cfg 'feature="default"' --cfg 'feature="executor"' --cfg 'feature="futures-executor"' --cfg 'feature="std"' -C metadata=df761c89bfa71e54 -C extra-filename=-df761c89bfa71e54 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern futures_io=/work/oxidecomputer/crucible/target/debug/deps/libfutures_io-bcbbef0c8c581d67.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern futures_task=/work/oxidecomputer/crucible/target/debug/deps/libfutures_task-12b58f257ddc96a4.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --cap-lints allow`
412 Compiling atty v0.2.14
413 Running `rustc --crate-name atty /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/atty-0.2.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bfb6a2cdc762f7c4 -C extra-filename=-bfb6a2cdc762f7c4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
414 Compiling autocfg v0.1.8
415 Running `rustc --crate-name autocfg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/autocfg-0.1.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=0a34a0aae5d9487a -C extra-filename=-0a34a0aae5d9487a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rmeta --cap-lints allow`
416 Running `/work/oxidecomputer/crucible/target/debug/build/slog-async-128f43cfd1e1203b/build-script-build`
417 Compiling cpufeatures v0.2.1
418 Running `rustc --crate-name cpufeatures --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cpufeatures-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6e25ef20a8fa218f -C extra-filename=-6e25ef20a8fa218f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
419 Compiling take_mut v0.2.2
420 Running `rustc --crate-name take_mut /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/take_mut-0.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0ebd75564ded4875 -C extra-filename=-0ebd75564ded4875 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
421 Compiling rand_core v0.4.2
422 Running `rustc --crate-name rand_core /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_core-0.4.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=cf5252fd1c1f594a -C extra-filename=-cf5252fd1c1f594a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
423 Compiling libgit2-sys v0.15.2+1.6.4
424 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libgit2-sys-0.15.2+1.6.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=69da610a4d2f950f -C extra-filename=-69da610a4d2f950f --out-dir /work/oxidecomputer/crucible/target/debug/build/libgit2-sys-69da610a4d2f950f -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --cap-lints allow`
425 Compiling slog-term v2.9.0
426 Running `rustc --crate-name slog_term --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-term-2.9.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=31c687431f6dd53c -C extra-filename=-31c687431f6dd53c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern term=/work/oxidecomputer/crucible/target/debug/deps/libterm-9ecdb763f4f627bf.rmeta --extern thread_local=/work/oxidecomputer/crucible/target/debug/deps/libthread_local-771d112d9219dc9f.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow`
427 Running `rustc --crate-name slog_async /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-async-2.8.0/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=03e743f036119074 -C extra-filename=-03e743f036119074 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern take_mut=/work/oxidecomputer/crucible/target/debug/deps/libtake_mut-0ebd75564ded4875.rmeta --extern thread_local=/work/oxidecomputer/crucible/target/debug/deps/libthread_local-771d112d9219dc9f.rmeta --cap-lints allow --cfg integer128`
428 Compiling bytes v1.4.0
429 Running `rustc --crate-name bytes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bytes-1.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=64a8a55ef81e55dd -C extra-filename=-64a8a55ef81e55dd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
430 Running `rustc --crate-name serde_json --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_json-1.0.105/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=96027e7dd982d07a -C extra-filename=-96027e7dd982d07a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow --cfg limb_width_64`
431 Running `rustc --crate-name indexmap --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indexmap-1.9.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' --cfg 'feature="serde-1"' --cfg 'feature="std"' -C metadata=c2c8f74266eebb64 -C extra-filename=-c2c8f74266eebb64 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-3ad6614047c487f9.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow --cfg has_std`
432 Compiling chrono v0.4.26
433 Running `rustc --crate-name chrono --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/chrono-0.4.26/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="clock"' --cfg 'feature="default"' --cfg 'feature="iana-time-zone"' --cfg 'feature="js-sys"' --cfg 'feature="oldtime"' --cfg 'feature="serde"' --cfg 'feature="std"' --cfg 'feature="time"' --cfg 'feature="wasm-bindgen"' --cfg 'feature="wasmbind"' --cfg 'feature="winapi"' -C metadata=0402a5e17dd99cdf -C extra-filename=-0402a5e17dd99cdf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern iana_time_zone=/work/oxidecomputer/crucible/target/debug/deps/libiana_time_zone-3051bc3a975f54c9.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-fcbf6ea26d482f3a.rmeta --cap-lints allow`
434 Compiling uuid v1.4.1
435 Running `rustc --crate-name uuid --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/uuid-1.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="getrandom"' --cfg 'feature="rng"' --cfg 'feature="serde"' --cfg 'feature="std"' --cfg 'feature="v4"' -C metadata=7cc8e87b4149b49e -C extra-filename=-7cc8e87b4149b49e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
436 Compiling tokio v1.32.0
437 Running `rustc --crate-name tokio --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-1.32.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bytes"' --cfg 'feature="default"' --cfg 'feature="fs"' --cfg 'feature="full"' --cfg 'feature="io-std"' --cfg 'feature="io-util"' --cfg 'feature="libc"' --cfg 'feature="macros"' --cfg 'feature="mio"' --cfg 'feature="net"' --cfg 'feature="num_cpus"' --cfg 'feature="parking_lot"' --cfg 'feature="process"' --cfg 'feature="rt"' --cfg 'feature="rt-multi-thread"' --cfg 'feature="signal"' --cfg 'feature="signal-hook-registry"' --cfg 'feature="socket2"' --cfg 'feature="sync"' --cfg 'feature="test-util"' --cfg 'feature="time"' --cfg 'feature="tokio-macros"' --cfg 'feature="windows-sys"' -C metadata=cf190744403b2ee1 -C extra-filename=-cf190744403b2ee1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern signal_hook_registry=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_registry-4d955479f235827e.rmeta --extern socket2=/work/oxidecomputer/crucible/target/debug/deps/libsocket2-b464b617227db85e.rmeta --extern 
tokio_macros=/work/oxidecomputer/crucible/target/debug/deps/libtokio_macros-129d27199cb07668.so --cap-lints allow`
438 Compiling http v0.2.9
439 Running `rustc --crate-name http --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/http-0.2.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4526ab951518c907 -C extra-filename=-4526ab951518c907 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --cap-lints allow`
440 Compiling serde_urlencoded v0.7.1
441 Running `rustc --crate-name serde_urlencoded --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_urlencoded-0.7.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=779d3ac41d95557a -C extra-filename=-779d3ac41d95557a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
442 Compiling serde_tokenstream v0.2.0
443 Running `rustc --crate-name serde_tokenstream --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_tokenstream-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=2a83027836bfd0fb -C extra-filename=-2a83027836bfd0fb --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --cap-lints allow`
444 Compiling serde_yaml v0.9.17
445 Running `rustc --crate-name serde_yaml --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_yaml-0.9.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=c7db3f53bbf8134b -C extra-filename=-c7db3f53bbf8134b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern unsafe_libyaml=/work/oxidecomputer/crucible/target/debug/deps/libunsafe_libyaml-18c99c193ab0b0f5.rmeta --cap-lints allow`
446 Compiling serde_spanned v0.6.3
447 Running `rustc --crate-name serde_spanned --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_spanned-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' -C metadata=92d13640a4d5d836 -C extra-filename=-92d13640a4d5d836 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
448 Compiling openapiv3 v1.0.2
449 Running `rustc --crate-name openapiv3 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openapiv3-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="skip_serializing_defaults"' -C metadata=f84072cef6d0d68c -C extra-filename=-f84072cef6d0d68c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --cap-lints allow`
450 Compiling toml_datetime v0.6.3
451 Running `rustc --crate-name toml_datetime --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/toml_datetime-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' -C metadata=c003080b79dfe49d -C extra-filename=-c003080b79dfe49d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
452 Running `rustc --crate-name usdt_impl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-impl-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="asm"' -C metadata=aa84cb563c29877d -C extra-filename=-aa84cb563c29877d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern dof=/work/oxidecomputer/crucible/target/debug/deps/libdof-48f06b38719b0295.rmeta --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern thread_id=/work/oxidecomputer/crucible/target/debug/deps/libthread_id-eaa0d9ff93152533.rmeta --cap-lints allow --cfg usdt_stable_asm --cfg usdt_backend_standard`
453 Compiling toml_edit v0.19.12
454 Running `rustc --crate-name toml_edit --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/toml_edit-0.19.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' -C metadata=29ed73c573391add -C extra-filename=-29ed73c573391add --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-32a62b7926f710bd.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_spanned=/work/oxidecomputer/crucible/target/debug/deps/libserde_spanned-92d13640a4d5d836.rmeta --extern toml_datetime=/work/oxidecomputer/crucible/target/debug/deps/libtoml_datetime-c003080b79dfe49d.rmeta --extern winnow=/work/oxidecomputer/crucible/target/debug/deps/libwinnow-e34c187c773d92ef.rmeta --cap-lints allow`
455 Running `rustc --crate-name schemars --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/schemars-0.8.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bytes"' --cfg 'feature="chrono"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="schemars_derive"' --cfg 'feature="uuid1"' -C metadata=83d20014cee5d9b5 -C extra-filename=-83d20014cee5d9b5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern dyn_clone=/work/oxidecomputer/crucible/target/debug/deps/libdyn_clone-6ca330b50294836a.rmeta --extern schemars_derive=/work/oxidecomputer/crucible/target/debug/deps/libschemars_derive-ca659e99c5534de4.so --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern uuid1=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow --cfg std_atomic64 --cfg std_atomic`
456 Compiling http-body v0.4.4
457 Running `rustc --crate-name http_body --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/http-body-0.4.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bb1d69dd918c127f -C extra-filename=-bb1d69dd918c127f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --cap-lints allow`
458 Running `rustc --crate-name semver --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/semver-1.0.18/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=8c1c5827befd93e7 -C extra-filename=-8c1c5827befd93e7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
459 Compiling serde_tokenstream v0.1.6
460 Running `rustc --crate-name serde_tokenstream --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_tokenstream-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=a3f756c2f035b223 -C extra-filename=-a3f756c2f035b223 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
461 Compiling slog-json v2.6.1
462 Running `rustc --crate-name slog_json --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-json-2.6.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=d8408f8f3a6dd5b7 -C extra-filename=-d8408f8f3a6dd5b7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow`
463 Compiling usdt-macro v0.3.5
464 Running `rustc --crate-name usdt_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-macro-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="asm"' -C metadata=0ea5e72aa4bb61c5 -C extra-filename=-0ea5e72aa4bb61c5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-a3f756c2f035b223.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern usdt_impl=/work/oxidecomputer/crucible/target/debug/deps/libusdt_impl-aa84cb563c29877d.rlib --extern proc_macro --cap-lints allow`
465 Compiling typify-impl v0.0.13 (https://github.com/oxidecomputer/typify#92bfed8b)
466 Running `rustc --crate-name typify_impl --edition=2021 /home/build/.cargo/git/checkouts/typify-288d5a84bbbe6a46/92bfed8/typify-impl/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5385296f2ea50467 -C extra-filename=-5385296f2ea50467 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern regress=/work/oxidecomputer/crucible/target/debug/deps/libregress-10da65958da1c830.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow`
467 Compiling usdt-attr-macro v0.3.5
468 Running `rustc --crate-name usdt_attr_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-attr-macro-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="asm"' -C metadata=1e49091236cd57b8 -C extra-filename=-1e49091236cd57b8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-a3f756c2f035b223.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern usdt_impl=/work/oxidecomputer/crucible/target/debug/deps/libusdt_impl-aa84cb563c29877d.rlib --extern proc_macro --cap-lints allow`
469 Compiling slog-bunyan v2.4.0
470 Running `rustc --crate-name slog_bunyan --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-bunyan-2.4.0/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=dce051a6775f1d99 -C extra-filename=-dce051a6775f1d99 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hostname=/work/oxidecomputer/crucible/target/debug/deps/libhostname-4a0f8b1a56e5681a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_json=/work/oxidecomputer/crucible/target/debug/deps/libslog_json-d8408f8f3a6dd5b7.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow`
471 Compiling toml v0.7.6
472 Running `rustc --crate-name toml --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/toml-0.7.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="display"' --cfg 'feature="parse"' -C metadata=de0eb3fcc3b95b5c -C extra-filename=-de0eb3fcc3b95b5c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_spanned=/work/oxidecomputer/crucible/target/debug/deps/libserde_spanned-92d13640a4d5d836.rmeta --extern toml_datetime=/work/oxidecomputer/crucible/target/debug/deps/libtoml_datetime-c003080b79dfe49d.rmeta --extern toml_edit=/work/oxidecomputer/crucible/target/debug/deps/libtoml_edit-29ed73c573391add.rmeta --cap-lints allow`
473 Running `/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-69da610a4d2f950f/build-script-build`
474 Running `rustc --crate-name libz_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libz-sys-1.1.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="libc"' -C metadata=a3111f279c2174e3 -C extra-filename=-a3111f279c2174e3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow -L native=/usr/lib/amd64 -l z`
475 Compiling errno v0.2.8
476 Running `rustc --crate-name errno /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/errno-0.2.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=af3769d2acbbbf20 -C extra-filename=-af3769d2acbbbf20 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
477 Compiling usdt v0.3.5
478 Running `rustc --crate-name usdt --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="asm"' --cfg 'feature="default"' --cfg 'feature="dtrace-parser"' -C metadata=86bb76e3b8fcea87 -C extra-filename=-86bb76e3b8fcea87 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern usdt_attr_macro=/work/oxidecomputer/crucible/target/debug/deps/libusdt_attr_macro-1e49091236cd57b8.so --extern usdt_impl=/work/oxidecomputer/crucible/target/debug/deps/libusdt_impl-aa84cb563c29877d.rmeta --extern usdt_macro=/work/oxidecomputer/crucible/target/debug/deps/libusdt_macro-0ea5e72aa4bb61c5.so --cap-lints allow`
479 Compiling utf8parse v0.2.1
480 Compiling vergen v8.2.4
481 Running `rustc --crate-name utf8parse --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/utf8parse-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=7be01c1e08c14495 -C extra-filename=-7be01c1e08c14495 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
482 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vergen-8.2.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="cargo"' --cfg 'feature="default"' --cfg 'feature="git"' --cfg 'feature="git2"' --cfg 'feature="git2-rs"' --cfg 'feature="rustc"' --cfg 'feature="rustc_version"' --cfg 'feature="time"' -C metadata=43caef821611c916 -C extra-filename=-43caef821611c916 --out-dir /work/oxidecomputer/crucible/target/debug/build/vergen-43caef821611c916 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rustversion=/work/oxidecomputer/crucible/target/debug/deps/librustversion-ae2ca065d7ce77a2.so --cap-lints allow`
483 Compiling memoffset v0.7.1
484 Compiling num-integer v0.1.44
485 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memoffset-0.7.1/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=4019f7a15f0e6805 -C extra-filename=-4019f7a15f0e6805 --out-dir /work/oxidecomputer/crucible/target/debug/build/memoffset-4019f7a15f0e6805 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
486 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-integer-0.1.44/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="std"' -C metadata=b9de1f088f036bd8 -C extra-filename=-b9de1f088f036bd8 --out-dir /work/oxidecomputer/crucible/target/debug/build/num-integer-b9de1f088f036bd8 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
487 Compiling vcpkg v0.2.15
488 Running `rustc --crate-name vcpkg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vcpkg-0.2.15/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=768a328be6bdd65b -C extra-filename=-768a328be6bdd65b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
489 Compiling rustc_version v0.4.0
490 Running `rustc --crate-name rustc_version --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustc_version-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=201ef6100eba532b -C extra-filename=-201ef6100eba532b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-8c1c5827befd93e7.rmeta --cap-lints allow`
491 Running `/work/oxidecomputer/crucible/target/debug/build/vergen-43caef821611c916/build-script-build`
492 Compiling rand_core v0.3.1
493 Running `rustc --crate-name rand_core /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_core-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=588d7d54223a4bae -C extra-filename=-588d7d54223a4bae --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow`
494 Compiling slog-dtrace v0.2.3
495 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-dtrace-0.2.3/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=a361b6965b7c4d75 -C extra-filename=-a361b6965b7c4d75 --out-dir /work/oxidecomputer/crucible/target/debug/build/slog-dtrace-a361b6965b7c4d75 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
496 Running `/work/oxidecomputer/crucible/target/debug/build/memoffset-4019f7a15f0e6805/build-script-build`
497 Running `/work/oxidecomputer/crucible/target/debug/build/num-integer-b9de1f088f036bd8/build-script-build`
498 Compiling static_assertions v1.1.0
499 Running `rustc --crate-name static_assertions /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/static_assertions-1.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=457e7de98121159f -C extra-filename=-457e7de98121159f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
500 Compiling rustix v0.36.5
501 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.36.5/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="io-lifetimes"' --cfg 'feature="libc"' --cfg 'feature="std"' --cfg 'feature="termios"' --cfg 'feature="use-libc-auxv"' -C metadata=40dacc7b426369ca -C extra-filename=-40dacc7b426369ca --out-dir /work/oxidecomputer/crucible/target/debug/build/rustix-40dacc7b426369ca -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
502 Compiling ident_case v1.0.1
503 Running `rustc --crate-name ident_case /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ident_case-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=90a2253e7db4d178 -C extra-filename=-90a2253e7db4d178 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
504 Running `rustc --crate-name num_integer /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-integer-0.1.44/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=ada2de842cf2fef0 -C extra-filename=-ada2de842cf2fef0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128`
505 Running `/work/oxidecomputer/crucible/target/debug/build/slog-dtrace-a361b6965b7c4d75/build-script-build`
506 Running `rustc --crate-name memoffset /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memoffset-0.7.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=7efa3f7abe9851cf -C extra-filename=-7efa3f7abe9851cf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg tuple_ty --cfg allow_clippy --cfg maybe_uninit --cfg doctests --cfg raw_ref_macros`
507 Compiling darling_core v0.20.1
508 Running `rustc --crate-name darling_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/darling_core-0.20.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="strsim"' --cfg 'feature="suggestions"' -C metadata=9504cdbd254aa6a2 -C extra-filename=-9504cdbd254aa6a2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern ident_case=/work/oxidecomputer/crucible/target/debug/deps/libident_case-90a2253e7db4d178.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern strsim=/work/oxidecomputer/crucible/target/debug/deps/libstrsim-3314ed6444b5bb69.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --cap-lints allow`
509 Compiling tokio-util v0.7.3
510 Running `rustc --crate-name tokio_util --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-util-0.7.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="codec"' --cfg 'feature="default"' --cfg 'feature="io"' --cfg 'feature="tracing"' -C metadata=279b3765a2b5aad1 -C extra-filename=-279b3765a2b5aad1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --cap-lints allow`
511 Compiling tokio-native-tls v0.3.0
512 Running `rustc --crate-name tokio_native_tls --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-native-tls-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f56aba82a642e205 -C extra-filename=-f56aba82a642e205 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern native_tls=/work/oxidecomputer/crucible/target/debug/deps/libnative_tls-320c05ab5bbd33c9.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64`
513 Running `/work/oxidecomputer/crucible/target/debug/build/rustix-40dacc7b426369ca/build-script-build`
514 Compiling num-bigint v0.2.6
515 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-bigint-0.2.6/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="std"' -C metadata=7d556b8a17e8e692 -C extra-filename=-7d556b8a17e8e692 --out-dir /work/oxidecomputer/crucible/target/debug/build/num-bigint-7d556b8a17e8e692 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
516 Compiling camino v1.1.4
517 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/camino-1.1.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="serde"' --cfg 'feature="serde1"' -C metadata=e2fc3fcdbebb7f61 -C extra-filename=-e2fc3fcdbebb7f61 --out-dir /work/oxidecomputer/crucible/target/debug/build/camino-e2fc3fcdbebb7f61 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
518 Compiling libsqlite3-sys v0.26.0
519 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libsqlite3-sys-0.26.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="min_sqlite_version_3_14_0"' --cfg 'feature="pkg-config"' --cfg 'feature="vcpkg"' -C metadata=9a39b2f8aed45f72 -C extra-filename=-9a39b2f8aed45f72 --out-dir /work/oxidecomputer/crucible/target/debug/build/libsqlite3-sys-9a39b2f8aed45f72 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --extern vcpkg=/work/oxidecomputer/crucible/target/debug/deps/libvcpkg-768a328be6bdd65b.rlib --cap-lints allow`
520 Compiling h2 v0.3.20
521 Running `rustc --crate-name h2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/h2-0.3.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3e2d8390f23dd48a -C extra-filename=-3e2d8390f23dd48a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern slab=/work/oxidecomputer/crucible/target/debug/deps/libslab-5b7c79e345d6363e.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --cap-lints allow`
522 Compiling semver v0.1.20
523 Running `rustc --crate-name semver /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/semver-0.1.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=395e5508bfb3cbcf -C extra-filename=-395e5508bfb3cbcf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
524 Running `/work/oxidecomputer/crucible/target/debug/build/libsqlite3-sys-9a39b2f8aed45f72/build-script-build`
525 Compiling bitflags v2.3.3
526 Running `rustc --crate-name bitflags --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bitflags-2.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=109244799287a8c3 -C extra-filename=-109244799287a8c3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
527 Running `rustc --crate-name libsqlite3_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libsqlite3-sys-0.26.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="min_sqlite_version_3_14_0"' --cfg 'feature="pkg-config"' --cfg 'feature="vcpkg"' -C metadata=d6c034ac2a660d5f -C extra-filename=-d6c034ac2a660d5f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow -L native=/usr/lib/amd64 -l sqlite3`
528 Compiling rustc_version v0.1.7
529 Running `rustc --crate-name rustc_version /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustc_version-0.1.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=478fd37c27f4f33b -C extra-filename=-478fd37c27f4f33b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-395e5508bfb3cbcf.rmeta --cap-lints allow`
530 Running `/work/oxidecomputer/crucible/target/debug/build/camino-e2fc3fcdbebb7f61/build-script-build`
531 Running `/work/oxidecomputer/crucible/target/debug/build/num-bigint-7d556b8a17e8e692/build-script-build`
532 Running `rustc --crate-name rustix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.36.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="io-lifetimes"' --cfg 'feature="libc"' --cfg 'feature="std"' --cfg 'feature="termios"' --cfg 'feature="use-libc-auxv"' -C metadata=06529533a7816508 -C extra-filename=-06529533a7816508 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc_errno=/work/oxidecomputer/crucible/target/debug/deps/liberrno-af3769d2acbbbf20.rmeta --extern io_lifetimes=/work/oxidecomputer/crucible/target/debug/deps/libio_lifetimes-df7ee936a2a5cbac.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg libc`
533 Compiling nix v0.26.2
534 Running `rustc --crate-name nix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nix-0.26.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="acct"' --cfg 'feature="aio"' --cfg 'feature="default"' --cfg 'feature="dir"' --cfg 'feature="env"' --cfg 'feature="event"' --cfg 'feature="feature"' --cfg 'feature="fs"' --cfg 'feature="hostname"' --cfg 'feature="inotify"' --cfg 'feature="ioctl"' --cfg 'feature="kmod"' --cfg 'feature="memoffset"' --cfg 'feature="mman"' --cfg 'feature="mount"' --cfg 'feature="mqueue"' --cfg 'feature="net"' --cfg 'feature="personality"' --cfg 'feature="pin-utils"' --cfg 'feature="poll"' --cfg 'feature="process"' --cfg 'feature="pthread"' --cfg 'feature="ptrace"' --cfg 'feature="quota"' --cfg 'feature="reboot"' --cfg 'feature="resource"' --cfg 'feature="sched"' --cfg 'feature="signal"' --cfg 'feature="socket"' --cfg 'feature="term"' --cfg 'feature="time"' --cfg 'feature="ucontext"' --cfg 'feature="uio"' --cfg 'feature="user"' --cfg 'feature="zerocopy"' -C metadata=3635aff9412bf811 -C extra-filename=-3635aff9412bf811 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern memoffset=/work/oxidecomputer/crucible/target/debug/deps/libmemoffset-7efa3f7abe9851cf.rmeta --extern pin_utils=/work/oxidecomputer/crucible/target/debug/deps/libpin_utils-bcfb754cd1ab8c67.rmeta --extern static_assertions=/work/oxidecomputer/crucible/target/debug/deps/libstatic_assertions-457e7de98121159f.rmeta --cap-lints allow`
535 Running `rustc --crate-name slog_dtrace --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-dtrace-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8024beacfb95325b -C extra-filename=-8024beacfb95325b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --cap-lints allow`
536 Compiling anstyle-parse v0.2.0
537 Running `rustc --crate-name anstyle_parse --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstyle-parse-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="utf8"' -C metadata=1cddf38a13db6213 -C extra-filename=-1cddf38a13db6213 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern utf8parse=/work/oxidecomputer/crucible/target/debug/deps/libutf8parse-7be01c1e08c14495.rmeta --cap-lints allow`
538 Compiling rand_pcg v0.1.2
539 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_pcg-0.1.2/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=0d95134e750a04ec -C extra-filename=-0d95134e750a04ec --out-dir /work/oxidecomputer/crucible/target/debug/build/rand_pcg-0d95134e750a04ec -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-0a34a0aae5d9487a.rlib --cap-lints allow`
540 Compiling rand_chacha v0.1.1
541 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_chacha-0.1.1/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=f5c658d3512bc715 -C extra-filename=-f5c658d3512bc715 --out-dir /work/oxidecomputer/crucible/target/debug/build/rand_chacha-f5c658d3512bc715 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-0a34a0aae5d9487a.rlib --cap-lints allow`
542 Compiling async-stream-impl v0.3.5
543 Running `rustc --crate-name async_stream_impl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-stream-impl-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=499322f076b313bc -C extra-filename=-499322f076b313bc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
544 Compiling hashlink v0.8.0
545 Running `rustc --crate-name hashlink --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashlink-0.8.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=20d837d226474f7f -C extra-filename=-20d837d226474f7f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-3ad6614047c487f9.rmeta --cap-lints allow`
546 Compiling num-rational v0.2.4
547 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-rational-0.2.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="bigint"' --cfg 'feature="num-bigint"' --cfg 'feature="std"' -C metadata=ff937c196b938c41 -C extra-filename=-ff937c196b938c41 --out-dir /work/oxidecomputer/crucible/target/debug/build/num-rational-ff937c196b938c41 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
548 Compiling num-complex v0.2.4
549 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-complex-0.2.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="std"' -C metadata=4def68cf859a947c -C extra-filename=-4def68cf859a947c --out-dir /work/oxidecomputer/crucible/target/debug/build/num-complex-4def68cf859a947c -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
550 Compiling num-iter v0.1.42
551 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-iter-0.1.42/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="std"' -C metadata=4bc024ee16b2fd63 -C extra-filename=-4bc024ee16b2fd63 --out-dir /work/oxidecomputer/crucible/target/debug/build/num-iter-4bc024ee16b2fd63 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
552 Compiling anstyle-query v1.0.0
553 Running `rustc --crate-name anstyle_query --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstyle-query-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f6b83c0a54455ea0 -C extra-filename=-f6b83c0a54455ea0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
554 Compiling fallible-streaming-iterator v0.1.9
555 Running `rustc --crate-name fallible_streaming_iterator /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fallible-streaming-iterator-0.1.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=95608b7ccd5e88f6 -C extra-filename=-95608b7ccd5e88f6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
556 Compiling colorchoice v1.0.0
557 Running `rustc --crate-name colorchoice --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/colorchoice-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=88cb686aa85d1c3b -C extra-filename=-88cb686aa85d1c3b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
558 Compiling paste v1.0.14
559 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/paste-1.0.14/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=9a388a178f36d25e -C extra-filename=-9a388a178f36d25e --out-dir /work/oxidecomputer/crucible/target/debug/build/paste-9a388a178f36d25e -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
560 Compiling anstyle v1.0.0
561 Running `rustc --crate-name anstyle --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstyle-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=b22d2b8ea1e30552 -C extra-filename=-b22d2b8ea1e30552 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
562 Compiling rusqlite v0.29.0
563 Running `rustc --crate-name rusqlite --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rusqlite-0.29.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e4d2316a88b06837 -C extra-filename=-e4d2316a88b06837 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-109244799287a8c3.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern fallible_streaming_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_streaming_iterator-95608b7ccd5e88f6.rmeta --extern hashlink=/work/oxidecomputer/crucible/target/debug/deps/libhashlink-20d837d226474f7f.rmeta --extern libsqlite3_sys=/work/oxidecomputer/crucible/target/debug/deps/liblibsqlite3_sys-d6c034ac2a660d5f.rmeta --extern smallvec=/work/oxidecomputer/crucible/target/debug/deps/libsmallvec-397f26bd8c84e528.rmeta --cap-lints allow -L native=/usr/lib/amd64`
564 Compiling darling_macro v0.20.1
565 Running `rustc --crate-name darling_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/darling_macro-0.20.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=9054a39daee42f78 -C extra-filename=-9054a39daee42f78 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern darling_core=/work/oxidecomputer/crucible/target/debug/deps/libdarling_core-9504cdbd254aa6a2.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
566 Compiling hyper v0.14.27
567 Running `rustc --crate-name hyper --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-0.14.27/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="client"' --cfg 'feature="default"' --cfg 'feature="full"' --cfg 'feature="h2"' --cfg 'feature="http1"' --cfg 'feature="http2"' --cfg 'feature="runtime"' --cfg 'feature="server"' --cfg 'feature="socket2"' --cfg 'feature="stream"' --cfg 'feature="tcp"' -C metadata=dad943d3b7cc33e9 -C extra-filename=-dad943d3b7cc33e9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern h2=/work/oxidecomputer/crucible/target/debug/deps/libh2-3e2d8390f23dd48a.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern http_body=/work/oxidecomputer/crucible/target/debug/deps/libhttp_body-bb1d69dd918c127f.rmeta --extern httparse=/work/oxidecomputer/crucible/target/debug/deps/libhttparse-59406412a39ce707.rmeta --extern httpdate=/work/oxidecomputer/crucible/target/debug/deps/libhttpdate-e3ef82a990113a54.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern socket2=/work/oxidecomputer/crucible/target/debug/deps/libsocket2-3c3e3607c1c6d64e.rmeta --extern 
tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tower_service=/work/oxidecomputer/crucible/target/debug/deps/libtower_service-51da71f2ad5117ee.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern want=/work/oxidecomputer/crucible/target/debug/deps/libwant-5737a0d118420ef7.rmeta --cap-lints allow`
568 Running `/work/oxidecomputer/crucible/target/debug/build/paste-9a388a178f36d25e/build-script-build`
569 Compiling anstream v0.5.0
570 Running `rustc --crate-name anstream --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstream-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="auto"' --cfg 'feature="default"' --cfg 'feature="wincon"' -C metadata=ca70a88d33d17944 -C extra-filename=-ca70a88d33d17944 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anstyle=/work/oxidecomputer/crucible/target/debug/deps/libanstyle-b22d2b8ea1e30552.rmeta --extern anstyle_parse=/work/oxidecomputer/crucible/target/debug/deps/libanstyle_parse-1cddf38a13db6213.rmeta --extern anstyle_query=/work/oxidecomputer/crucible/target/debug/deps/libanstyle_query-f6b83c0a54455ea0.rmeta --extern colorchoice=/work/oxidecomputer/crucible/target/debug/deps/libcolorchoice-88cb686aa85d1c3b.rmeta --extern utf8parse=/work/oxidecomputer/crucible/target/debug/deps/libutf8parse-7be01c1e08c14495.rmeta --cap-lints allow`
571 Compiling async-stream v0.3.5
572 Running `rustc --crate-name async_stream --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-stream-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0486f21173e73f9c -C extra-filename=-0486f21173e73f9c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_stream_impl=/work/oxidecomputer/crucible/target/debug/deps/libasync_stream_impl-499322f076b313bc.so --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --cap-lints allow`
573 Compiling darling v0.20.1
574 Running `rustc --crate-name darling --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/darling-0.20.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="suggestions"' -C metadata=00ecdd47ee30dd62 -C extra-filename=-00ecdd47ee30dd62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern darling_core=/work/oxidecomputer/crucible/target/debug/deps/libdarling_core-9504cdbd254aa6a2.rmeta --extern darling_macro=/work/oxidecomputer/crucible/target/debug/deps/libdarling_macro-9054a39daee42f78.so --cap-lints allow`
575 Running `/work/oxidecomputer/crucible/target/debug/build/num-iter-4bc024ee16b2fd63/build-script-build`
576 Running `/work/oxidecomputer/crucible/target/debug/build/num-complex-4def68cf859a947c/build-script-build`
577 Running `/work/oxidecomputer/crucible/target/debug/build/num-rational-ff937c196b938c41/build-script-build`
578 Running `/work/oxidecomputer/crucible/target/debug/build/rand_chacha-f5c658d3512bc715/build-script-build`
579 Running `/work/oxidecomputer/crucible/target/debug/build/rand_pcg-0d95134e750a04ec/build-script-build`
580 Compiling terminal_size v0.2.5
581 Running `rustc --crate-name terminal_size --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/terminal_size-0.2.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0b293d2f7e4e73b7 -C extra-filename=-0b293d2f7e4e73b7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rustix=/work/oxidecomputer/crucible/target/debug/deps/librustix-06529533a7816508.rmeta --cap-lints allow`
582 Running `rustc --crate-name num_bigint /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-bigint-0.2.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=fb487451ba2d3918 -C extra-filename=-fb487451ba2d3918 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128`
583 Compiling newtype_derive v0.1.6
584 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/newtype_derive-0.1.6/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=b4faed40373b7a32 -C extra-filename=-b4faed40373b7a32 --out-dir /work/oxidecomputer/crucible/target/debug/build/newtype_derive-b4faed40373b7a32 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rustc_version=/work/oxidecomputer/crucible/target/debug/deps/librustc_version-478fd37c27f4f33b.rlib --cap-lints allow`
585 Running `rustc --crate-name camino --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/camino-1.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' --cfg 'feature="serde1"' -C metadata=45f0f4a2c258f934 -C extra-filename=-45f0f4a2c258f934 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow --cfg path_buf_capacity --cfg shrink_to --cfg try_reserve_2 --cfg path_buf_deref_mut`
586 Compiling twox-hash v1.6.3
587 Running `rustc --crate-name twox_hash --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/twox-hash-1.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="rand"' --cfg 'feature="std"' -C metadata=9f5dd4f7319ca539 -C extra-filename=-9f5dd4f7319ca539 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern static_assertions=/work/oxidecomputer/crucible/target/debug/deps/libstatic_assertions-457e7de98121159f.rmeta --cap-lints allow`
588 Compiling hex v0.4.3
589 Running `rustc --crate-name hex --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hex-0.4.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=6531b11cb72de3e5 -C extra-filename=-6531b11cb72de3e5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
590 Compiling sha2 v0.10.2
591 Running `rustc --crate-name sha2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sha2-0.10.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9b09b4b286e2cb62 -C extra-filename=-9b09b4b286e2cb62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
592 Compiling rand v0.6.5
593 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand-0.6.5/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="rand_os"' --cfg 'feature="std"' -C metadata=f0b68220a1e4759e -C extra-filename=-f0b68220a1e4759e --out-dir /work/oxidecomputer/crucible/target/debug/build/rand-f0b68220a1e4759e -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-0a34a0aae5d9487a.rlib --cap-lints allow`
594 Compiling md-5 v0.10.1
595 Running `rustc --crate-name md5 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/md-5-0.10.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=161e8167d58df2d6 -C extra-filename=-161e8167d58df2d6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
596 Compiling hmac v0.12.1
597 Running `rustc --crate-name hmac --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hmac-0.12.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=dbc6bea790b908e1 -C extra-filename=-dbc6bea790b908e1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
598 Compiling structmeta-derive v0.1.5
599 Running `rustc --crate-name structmeta_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-derive-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=fab85507a465b1dd -C extra-filename=-fab85507a465b1dd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
600 Compiling stringprep v0.1.2
601 Running `rustc --crate-name stringprep /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/stringprep-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ca9dbc365b4e987e -C extra-filename=-ca9dbc365b4e987e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_bidi=/work/oxidecomputer/crucible/target/debug/deps/libunicode_bidi-2c825f5beb05a037.rmeta --extern unicode_normalization=/work/oxidecomputer/crucible/target/debug/deps/libunicode_normalization-3df8261a03d4248e.rmeta --cap-lints allow`
602 Compiling dropshot v0.9.1-dev (https://github.com/oxidecomputer/dropshot?branch=main#aca6de3c)
603 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/git/checkouts/dropshot-a4a923d29dccc492/aca6de3/dropshot/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="usdt"' --cfg 'feature="usdt-probes"' -C metadata=713c18277b5bcf4b -C extra-filename=-713c18277b5bcf4b --out-dir /work/oxidecomputer/crucible/target/debug/build/dropshot-713c18277b5bcf4b -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
604 Compiling regex-syntax v0.6.28
605 Running `rustc --crate-name regex_syntax --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-syntax-0.6.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="unicode"' --cfg 'feature="unicode-age"' --cfg 'feature="unicode-bool"' --cfg 'feature="unicode-case"' --cfg 'feature="unicode-gencat"' --cfg 'feature="unicode-perl"' --cfg 'feature="unicode-script"' --cfg 'feature="unicode-segment"' -C metadata=b9593ef3338880de -C extra-filename=-b9593ef3338880de --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
606 Compiling clap_lex v0.5.0
607 Running `rustc --crate-name clap_lex --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap_lex-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=cd6fc5d5239978f4 -C extra-filename=-cd6fc5d5239978f4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
608 Compiling atomic-waker v1.1.1
609 Running `rustc --crate-name atomic_waker --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/atomic-waker-1.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2b745687caafccb6 -C extra-filename=-2b745687caafccb6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
610 Compiling base64 v0.13.1
611 Running `rustc --crate-name base64 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/base64-0.13.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=480956047b7063a4 -C extra-filename=-480956047b7063a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
612 Compiling waitgroup v0.1.2
613 Running `rustc --crate-name waitgroup --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/waitgroup-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=db859ead02bd709e -C extra-filename=-db859ead02bd709e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern atomic_waker=/work/oxidecomputer/crucible/target/debug/deps/libatomic_waker-2b745687caafccb6.rmeta --cap-lints allow`
614 Compiling hyper-tls v0.5.0
615 Running `rustc --crate-name hyper_tls --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-tls-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2dbf57f91f681e2c -C extra-filename=-2dbf57f91f681e2c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern native_tls=/work/oxidecomputer/crucible/target/debug/deps/libnative_tls-320c05ab5bbd33c9.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_native_tls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_native_tls-f56aba82a642e205.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64`
616 Compiling postgres-protocol v0.6.4
617 Running `rustc --crate-name postgres_protocol --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/postgres-protocol-0.6.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2544e50ad0920d44 -C extra-filename=-2544e50ad0920d44 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-480956047b7063a4.rmeta --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern hmac=/work/oxidecomputer/crucible/target/debug/deps/libhmac-dbc6bea790b908e1.rmeta --extern md5=/work/oxidecomputer/crucible/target/debug/deps/libmd5-161e8167d58df2d6.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rmeta --extern stringprep=/work/oxidecomputer/crucible/target/debug/deps/libstringprep-ca9dbc365b4e987e.rmeta --cap-lints allow`
618 Compiling clap_builder v4.4.0
619 Running `rustc --crate-name clap_builder --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap_builder-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="color"' --cfg 'feature="env"' --cfg 'feature="error-context"' --cfg 'feature="help"' --cfg 'feature="std"' --cfg 'feature="suggestions"' --cfg 'feature="usage"' --cfg 'feature="wrap_help"' -C metadata=f6e4b2a8b78f1399 -C extra-filename=-f6e4b2a8b78f1399 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anstream=/work/oxidecomputer/crucible/target/debug/deps/libanstream-ca70a88d33d17944.rmeta --extern anstyle=/work/oxidecomputer/crucible/target/debug/deps/libanstyle-b22d2b8ea1e30552.rmeta --extern clap_lex=/work/oxidecomputer/crucible/target/debug/deps/libclap_lex-cd6fc5d5239978f4.rmeta --extern strsim=/work/oxidecomputer/crucible/target/debug/deps/libstrsim-3314ed6444b5bb69.rmeta --extern terminal_size=/work/oxidecomputer/crucible/target/debug/deps/libterminal_size-0b293d2f7e4e73b7.rmeta --cap-lints allow`
620 Running `/work/oxidecomputer/crucible/target/debug/build/dropshot-713c18277b5bcf4b/build-script-build`
621 Running `rustc --crate-name num_rational /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-rational-0.2.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bigint"' --cfg 'feature="num-bigint"' --cfg 'feature="std"' -C metadata=ff1997054aaa4c62 -C extra-filename=-ff1997054aaa4c62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_bigint=/work/oxidecomputer/crucible/target/debug/deps/libnum_bigint-fb487451ba2d3918.rmeta --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128 --cfg has_const_fn`
622 Running `/work/oxidecomputer/crucible/target/debug/build/rand-f0b68220a1e4759e/build-script-build`
623 Running `/work/oxidecomputer/crucible/target/debug/build/newtype_derive-b4faed40373b7a32/build-script-build`
624 Running `rustc --crate-name rand_pcg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_pcg-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=64095680c19f1d75 -C extra-filename=-64095680c19f1d75 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow --cfg rustc_1_26`
625 Running `rustc --crate-name rand_chacha /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_chacha-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=60a86e6d4f82a03e -C extra-filename=-60a86e6d4f82a03e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow --cfg rustc_1_26`
626 Running `rustc --crate-name num_complex /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-complex-0.2.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=b0ad3f1350de6722 -C extra-filename=-b0ad3f1350de6722 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128 --cfg has_const_fn`
627 Running `rustc --crate-name num_iter /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-iter-0.1.42/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=ea5f21eb64e222e3 -C extra-filename=-ea5f21eb64e222e3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128`
628 Compiling structmeta v0.1.5
629 Running `rustc --crate-name structmeta --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=478a2e76026e2d21 -C extra-filename=-478a2e76026e2d21 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern structmeta_derive=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta_derive-fab85507a465b1dd.so --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
630 Compiling serde_with_macros v2.3.3
631 Running `rustc --crate-name serde_with_macros --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_with_macros-2.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=0356ef5a55ed4c76 -C extra-filename=-0356ef5a55ed4c76 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern darling=/work/oxidecomputer/crucible/target/debug/deps/libdarling-00ecdd47ee30dd62.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
632 Running `rustc --crate-name paste --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/paste-1.0.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=251489637fc3d2bc -C extra-filename=-251489637fc3d2bc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro --cap-lints allow`
633 Compiling rand_xorshift v0.1.1
634 Running `rustc --crate-name rand_xorshift /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_xorshift-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=516db51379fddd21 -C extra-filename=-516db51379fddd21 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow`
635 Compiling rand_hc v0.1.0
636 Running `rustc --crate-name rand_hc /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_hc-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e624ce70fff78cce -C extra-filename=-e624ce70fff78cce --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow`
637 Compiling rand_isaac v0.1.1
638 Running `rustc --crate-name rand_isaac /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_isaac-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2a69a705c11dd4e8 -C extra-filename=-2a69a705c11dd4e8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow`
639 Compiling dropshot_endpoint v0.9.1-dev (https://github.com/oxidecomputer/dropshot?branch=main#aca6de3c)
640 Running `rustc --crate-name dropshot_endpoint --edition=2018 /home/build/.cargo/git/checkouts/dropshot-a4a923d29dccc492/aca6de3/dropshot_endpoint/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=1ff3a3dd0352c250 -C extra-filename=-1ff3a3dd0352c250 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-2a83027836bfd0fb.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
641 Compiling rand_jitter v0.1.4
642 Running `rustc --crate-name rand_jitter /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_jitter-0.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=4375ed928e3b252c -C extra-filename=-4375ed928e3b252c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow`
643 Compiling rand_os v0.1.3
644 Running `rustc --crate-name rand_os /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_os-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5662c3e15991676f -C extra-filename=-5662c3e15991676f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow`
645 Compiling serde_path_to_error v0.1.14
646 Running `rustc --crate-name serde_path_to_error --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_path_to_error-0.1.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f8fffad8b554a310 -C extra-filename=-f8fffad8b554a310 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
647 Compiling sha1 v0.10.5
648 Running `rustc --crate-name sha1 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sha1-0.10.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=8fdbd7a715f3bef1 -C extra-filename=-8fdbd7a715f3bef1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
649 Compiling phf_shared v0.10.0
650 Running `rustc --crate-name phf_shared --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_shared-0.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=ded46b90978b786e -C extra-filename=-ded46b90978b786e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern siphasher=/work/oxidecomputer/crucible/target/debug/deps/libsiphasher-2eb27360f66646f3.rmeta --cap-lints allow`
651 Compiling strum_macros v0.25.2
652 Running `rustc --crate-name strum_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strum_macros-0.25.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=bc907f623478289d -C extra-filename=-bc907f623478289d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern rustversion=/work/oxidecomputer/crucible/target/debug/deps/librustversion-ae2ca065d7ce77a2.so --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
653 Compiling clap_derive v4.4.0
654 Running `rustc --crate-name clap_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap_derive-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="default"' -C metadata=09787059b4668753 -C extra-filename=-09787059b4668753 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
655 Compiling debug-ignore v1.0.5
656 Running `rustc --crate-name debug_ignore /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/debug-ignore-1.0.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2303f500fcbc7093 -C extra-filename=-2303f500fcbc7093 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
657 Compiling either v1.6.1
658 Running `rustc --crate-name either /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/either-1.6.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_std"' -C metadata=e075e5427abad613 -C extra-filename=-e075e5427abad613 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
659 Compiling fixedbitset v0.4.1
660 Running `rustc --crate-name fixedbitset /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fixedbitset-0.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c5fa142e7e51f766 -C extra-filename=-c5fa142e7e51f766 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
661 Compiling serde_with v2.3.3
662 Running `rustc --crate-name serde_with --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_with-2.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="macros"' --cfg 'feature="std"' -C metadata=4f9ddd30b380d6cf -C extra-filename=-4f9ddd30b380d6cf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_with_macros=/work/oxidecomputer/crucible/target/debug/deps/libserde_with_macros-0356ef5a55ed4c76.so --cap-lints allow`
663 Compiling petgraph v0.6.3
664 Running `rustc --crate-name petgraph --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/petgraph-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="graphmap"' --cfg 'feature="matrix_graph"' --cfg 'feature="serde"' --cfg 'feature="serde-1"' --cfg 'feature="serde_derive"' --cfg 'feature="stable_graph"' -C metadata=8e9556a1882bb1d5 -C extra-filename=-8e9556a1882bb1d5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fixedbitset=/work/oxidecomputer/crucible/target/debug/deps/libfixedbitset-c5fa142e7e51f766.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --cap-lints allow`
665 Compiling phf v0.10.1
666 Running `rustc --crate-name phf --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf-0.10.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=692e1d997f5b6ce1 -C extra-filename=-692e1d997f5b6ce1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-ded46b90978b786e.rmeta --cap-lints allow`
667 Running `rustc --crate-name rand /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand-0.6.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="rand_os"' --cfg 'feature="std"' -C metadata=d54af7a747f291ab -C extra-filename=-d54af7a747f291ab --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-60a86e6d4f82a03e.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --extern rand_hc=/work/oxidecomputer/crucible/target/debug/deps/librand_hc-e624ce70fff78cce.rmeta --extern rand_isaac=/work/oxidecomputer/crucible/target/debug/deps/librand_isaac-2a69a705c11dd4e8.rmeta --extern rand_jitter=/work/oxidecomputer/crucible/target/debug/deps/librand_jitter-4375ed928e3b252c.rmeta --extern rand_os=/work/oxidecomputer/crucible/target/debug/deps/librand_os-5662c3e15991676f.rmeta --extern rand_pcg=/work/oxidecomputer/crucible/target/debug/deps/librand_pcg-64095680c19f1d75.rmeta --extern rand_xorshift=/work/oxidecomputer/crucible/target/debug/deps/librand_xorshift-516db51379fddd21.rmeta --cap-lints allow --cfg rustc_1_25 --cfg rustc_1_26 --cfg rustc_1_27`
668 Compiling parse-display-derive v0.7.0
669 Running `rustc --crate-name parse_display_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parse-display-derive-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=95e2e5d955c96bcf -C extra-filename=-95e2e5d955c96bcf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rlib --extern regex_syntax=/work/oxidecomputer/crucible/target/debug/deps/libregex_syntax-b9593ef3338880de.rlib --extern structmeta=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta-478a2e76026e2d21.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
670 Compiling num v0.2.1
671 Running `rustc --crate-name num /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="num-bigint"' --cfg 'feature="std"' -C metadata=1148753de5cf68fd -C extra-filename=-1148753de5cf68fd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_bigint=/work/oxidecomputer/crucible/target/debug/deps/libnum_bigint-fb487451ba2d3918.rmeta --extern num_complex=/work/oxidecomputer/crucible/target/debug/deps/libnum_complex-b0ad3f1350de6722.rmeta --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_iter=/work/oxidecomputer/crucible/target/debug/deps/libnum_iter-ea5f21eb64e222e3.rmeta --extern num_rational=/work/oxidecomputer/crucible/target/debug/deps/libnum_rational-ff1997054aaa4c62.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow`
672 Running `rustc --crate-name newtype_derive /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/newtype_derive-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=fedf432d6b5251a4 -C extra-filename=-fedf432d6b5251a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg op_assign`
673 Compiling strum v0.25.0
674 Running `rustc --crate-name strum --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strum-0.25.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="std"' --cfg 'feature="strum_macros"' -C metadata=59ea3c6704348e58 -C extra-filename=-59ea3c6704348e58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern strum_macros=/work/oxidecomputer/crucible/target/debug/deps/libstrum_macros-bc907f623478289d.so --cap-lints allow`
675 Compiling postgres-types v0.2.3
676 Running `rustc --crate-name postgres_types --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/postgres-types-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="chrono-04"' --cfg 'feature="uuid-1"' --cfg 'feature="with-chrono-0_4"' --cfg 'feature="with-uuid-1"' -C metadata=3d7a8ed523cd84fa -C extra-filename=-3d7a8ed523cd84fa --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono_04=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern postgres_protocol=/work/oxidecomputer/crucible/target/debug/deps/libpostgres_protocol-2544e50ad0920d44.rmeta --extern uuid_1=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow`
677 Compiling proc-macro-crate v1.3.1
678 Running `rustc --crate-name proc_macro_crate --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proc-macro-crate-1.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=3935aefc56866ee2 -C extra-filename=-3935aefc56866ee2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern toml_edit=/work/oxidecomputer/crucible/target/debug/deps/libtoml_edit-29ed73c573391add.rmeta --cap-lints allow`
679 Compiling instant v0.1.12
680 Running `rustc --crate-name instant --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/instant-0.1.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3f52fffb2a7c0a97 -C extra-filename=-3f52fffb2a7c0a97 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow`
681 Compiling clap v4.4.0
682 Running `rustc --crate-name clap --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="color"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="env"' --cfg 'feature="error-context"' --cfg 'feature="help"' --cfg 'feature="std"' --cfg 'feature="suggestions"' --cfg 'feature="usage"' --cfg 'feature="wrap_help"' -C metadata=49bc17aade028e79 -C extra-filename=-49bc17aade028e79 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern clap_builder=/work/oxidecomputer/crucible/target/debug/deps/libclap_builder-f6e4b2a8b78f1399.rmeta --extern clap_derive=/work/oxidecomputer/crucible/target/debug/deps/libclap_derive-09787059b4668753.so --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow`
683 Compiling backoff v0.4.0
684 Running `rustc --crate-name backoff --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/backoff-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="futures"' --cfg 'feature="futures-core"' --cfg 'feature="pin-project-lite"' --cfg 'feature="tokio"' --cfg 'feature="tokio_1"' -C metadata=2bc4a2fd075cf434 -C extra-filename=-2bc4a2fd075cf434 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --extern instant=/work/oxidecomputer/crucible/target/debug/deps/libinstant-3f52fffb2a7c0a97.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern tokio_1=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
685 Compiling tokio-postgres v0.7.6
686 Running `rustc --crate-name tokio_postgres --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-postgres-0.7.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="runtime"' --cfg 'feature="with-chrono-0_4"' --cfg 'feature="with-uuid-1"' -C metadata=5628b93feb58339b -C extra-filename=-5628b93feb58339b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern phf=/work/oxidecomputer/crucible/target/debug/deps/libphf-692e1d997f5b6ce1.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern postgres_protocol=/work/oxidecomputer/crucible/target/debug/deps/libpostgres_protocol-2544e50ad0920d44.rmeta --extern postgres_types=/work/oxidecomputer/crucible/target/debug/deps/libpostgres_types-3d7a8ed523cd84fa.rmeta --extern socket2=/work/oxidecomputer/crucible/target/debug/deps/libsocket2-3c3e3607c1c6d64e.rmeta --extern 
tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --cap-lints allow`
687 Compiling steno v0.4.0
688 Running `rustc --crate-name steno --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/steno-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=d1d3ce30296926ad -C extra-filename=-d1d3ce30296926ad --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern newtype_derive=/work/oxidecomputer/crucible/target/debug/deps/libnewtype_derive-fedf432d6b5251a4.rmeta --extern petgraph=/work/oxidecomputer/crucible/target/debug/deps/libpetgraph-8e9556a1882bb1d5.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow`
689 Compiling num_enum_derive v0.7.0
690 Running `rustc --crate-name num_enum_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_enum_derive-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="proc-macro-crate"' --cfg 'feature="std"' -C metadata=2bef02e01d5b06d3 -C extra-filename=-2bef02e01d5b06d3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro_crate=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro_crate-3935aefc56866ee2.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
691 Compiling statistical v1.0.0
692 Running `rustc --crate-name statistical /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/statistical-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f2809857d7fe1c91 -C extra-filename=-f2809857d7fe1c91 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num=/work/oxidecomputer/crucible/target/debug/deps/libnum-1148753de5cf68fd.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-d54af7a747f291ab.rmeta --cap-lints allow`
693 Compiling serde_human_bytes v0.1.0 (http://github.com/oxidecomputer/serde_human_bytes?branch=main#0a097945)
694 Running `rustc --crate-name serde_human_bytes --edition=2018 /home/build/.cargo/git/checkouts/serde_human_bytes-25cac76e2ef2d15c/0a09794/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7f54d0fcbf9b36f3 -C extra-filename=-7f54d0fcbf9b36f3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
695 Compiling camino-tempfile v1.0.2
696 Running `rustc --crate-name camino_tempfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/camino-tempfile-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=38b6a8d85c9dc0c0 -C extra-filename=-38b6a8d85c9dc0c0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern camino=/work/oxidecomputer/crucible/target/debug/deps/libcamino-45f0f4a2c258f934.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --cap-lints allow`
697 Compiling ipnetwork v0.20.0
698 Running `rustc --crate-name ipnetwork --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ipnetwork-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="schemars"' --cfg 'feature="serde"' -C metadata=0e9e550a49db2c52 -C extra-filename=-0e9e550a49db2c52 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
699 Compiling macaddr v1.0.1
700 Running `rustc --crate-name macaddr --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/macaddr-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="serde_std"' --cfg 'feature="std"' -C metadata=98e89df75c36be48 -C extra-filename=-98e89df75c36be48 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
701 Compiling typify-macro v0.0.13 (https://github.com/oxidecomputer/typify#92bfed8b)
702 Running `rustc --crate-name typify_macro --edition=2021 /home/build/.cargo/git/checkouts/typify-288d5a84bbbe6a46/92bfed8/typify-macro/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=f2da351574d5abd0 -C extra-filename=-f2da351574d5abd0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-2a83027836bfd0fb.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern typify_impl=/work/oxidecomputer/crucible/target/debug/deps/libtypify_impl-5385296f2ea50467.rlib --extern proc_macro --cap-lints allow`
703 Compiling parse-display v0.7.0
704 Running `rustc --crate-name parse_display --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parse-display-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="once_cell"' --cfg 'feature="regex"' --cfg 'feature="std"' -C metadata=34a1a5d52375b70b -C extra-filename=-34a1a5d52375b70b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern parse_display_derive=/work/oxidecomputer/crucible/target/debug/deps/libparse_display_derive-95e2e5d955c96bcf.so --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rmeta --cap-lints allow`
705 Compiling api_identity v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
706 Running `rustc --crate-name api_identity --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/api_identity/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=90d45ecc06c8f773 -C extra-filename=-90d45ecc06c8f773 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
707 Compiling base64ct v1.6.0
708 Running `rustc --crate-name base64ct --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/base64ct-1.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=5aa1f74fadba3334 -C extra-filename=-5aa1f74fadba3334 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
709 Compiling bincode v1.3.3
710 Running `rustc --crate-name bincode /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bincode-1.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bcb925e8faac86cd -C extra-filename=-bcb925e8faac86cd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
711 Compiling password-hash v0.5.0
712 Running `rustc --crate-name password_hash --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/password-hash-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="rand_core"' --cfg 'feature="std"' -C metadata=6571709e5cce3ff3 -C extra-filename=-6571709e5cce3ff3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64ct=/work/oxidecomputer/crucible/target/debug/deps/libbase64ct-5aa1f74fadba3334.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --cap-lints allow`
713 Compiling blake2 v0.10.6
714 Running `rustc --crate-name blake2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/blake2-0.10.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=39c009971635667f -C extra-filename=-39c009971635667f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
715 Compiling inout v0.1.3
716 Running `rustc --crate-name inout --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/inout-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=63af7c577909eb44 -C extra-filename=-63af7c577909eb44 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --cap-lints allow`
717 Compiling universal-hash v0.5.0
718 Running `rustc --crate-name universal_hash --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/universal-hash-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4588bd931f24d922 -C extra-filename=-4588bd931f24d922 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --cap-lints allow`
719 Compiling cipher v0.4.3
720 Running `rustc --crate-name cipher --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cipher-0.4.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fe5b83e4e2b73d72 -C extra-filename=-fe5b83e4e2b73d72 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern inout=/work/oxidecomputer/crucible/target/debug/deps/libinout-63af7c577909eb44.rmeta --cap-lints allow`
721 Compiling argon2 v0.5.0
722 Running `rustc --crate-name argon2 --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/argon2-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="password-hash"' --cfg 'feature="rand"' --cfg 'feature="std"' -C metadata=8f03ab3a8d12bcba -C extra-filename=-8f03ab3a8d12bcba --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64ct=/work/oxidecomputer/crucible/target/debug/deps/libbase64ct-5aa1f74fadba3334.rmeta --extern blake2=/work/oxidecomputer/crucible/target/debug/deps/libblake2-39c009971635667f.rmeta --extern password_hash=/work/oxidecomputer/crucible/target/debug/deps/libpassword_hash-6571709e5cce3ff3.rmeta --cap-lints allow`
723 Compiling oximeter-macro-impl v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
724 Running `rustc --crate-name oximeter_macro_impl --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/oximeter/oximeter-macro-impl/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=e4cc949eda20c416 -C extra-filename=-e4cc949eda20c416 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
725 Compiling num_enum v0.7.0
726 Running `rustc --crate-name num_enum --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_enum-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9cd7a6d9dcf1dd5a -C extra-filename=-9cd7a6d9dcf1dd5a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_enum_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum_derive-2bef02e01d5b06d3.so --cap-lints allow`
727 Compiling opaque-debug v0.3.0
728 Running `rustc --crate-name opaque_debug --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opaque-debug-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bdee85e4e8a367bc -C extra-filename=-bdee85e4e8a367bc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
729 Compiling aes v0.8.2
730 Running `rustc --crate-name aes --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aes-0.8.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e0592e8e1d03d860 -C extra-filename=-e0592e8e1d03d860 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cipher=/work/oxidecomputer/crucible/target/debug/deps/libcipher-fe5b83e4e2b73d72.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --cap-lints allow`
731 Compiling omicron-passwords v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
732 Running `rustc --crate-name omicron_passwords --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/passwords/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ac6e3a602e6ad041 -C extra-filename=-ac6e3a602e6ad041 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern argon2=/work/oxidecomputer/crucible/target/debug/deps/libargon2-8f03ab3a8d12bcba.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_with=/work/oxidecomputer/crucible/target/debug/deps/libserde_with-4f9ddd30b380d6cf.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
733 Compiling polyval v0.6.0
734 Running `rustc --crate-name polyval --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/polyval-0.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e4b47d7c1f31f998 -C extra-filename=-e4b47d7c1f31f998 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --extern opaque_debug=/work/oxidecomputer/crucible/target/debug/deps/libopaque_debug-bdee85e4e8a367bc.rmeta --extern universal_hash=/work/oxidecomputer/crucible/target/debug/deps/libuniversal_hash-4588bd931f24d922.rmeta --cap-lints allow`
735 Compiling ctr v0.9.2
736 Running `rustc --crate-name ctr --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ctr-0.9.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=583e100d27882194 -C extra-filename=-583e100d27882194 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cipher=/work/oxidecomputer/crucible/target/debug/deps/libcipher-fe5b83e4e2b73d72.rmeta --cap-lints allow`
737 Compiling crucible-client-types v0.1.0 (/work/oxidecomputer/crucible/crucible-client-types)
738 Running `rustc --crate-name crucible_client_types --edition=2021 crucible-client-types/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bd54c4335d2370bd -C extra-filename=-bd54c4335d2370bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta`
739 Compiling aead v0.5.1
740 Running `rustc --crate-name aead --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aead-0.5.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="getrandom"' --cfg 'feature="rand_core"' -C metadata=f30da292094eb963 -C extra-filename=-f30da292094eb963 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --cap-lints allow`
741 Compiling crucible v0.0.1 (/work/oxidecomputer/crucible/upstairs)
742 Running `rustc --crate-name build_script_build --edition=2021 upstairs/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=1852afdb7baa90de -C extra-filename=-1852afdb7baa90de --out-dir /work/oxidecomputer/crucible/target/debug/build/crucible-1852afdb7baa90de -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib`
743 Compiling zeroize v1.3.0
744 Running `rustc --crate-name zeroize --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/zeroize-1.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8e99cf4813483d58 -C extra-filename=-8e99cf4813483d58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
745 Compiling itertools v0.11.0
746 Running `rustc --crate-name itertools --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/itertools-0.11.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_alloc"' --cfg 'feature="use_std"' -C metadata=b06e69badd72e55c -C extra-filename=-b06e69badd72e55c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --cap-lints allow`
747 Running `/work/oxidecomputer/crucible/target/debug/build/crucible-1852afdb7baa90de/build-script-build`
748 Compiling aes-gcm-siv v0.11.1
749 Running `rustc --crate-name aes_gcm_siv --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aes-gcm-siv-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="aes"' --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="getrandom"' -C metadata=21495b616a07c9a4 -C extra-filename=-21495b616a07c9a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aead=/work/oxidecomputer/crucible/target/debug/deps/libaead-f30da292094eb963.rmeta --extern aes=/work/oxidecomputer/crucible/target/debug/deps/libaes-e0592e8e1d03d860.rmeta --extern cipher=/work/oxidecomputer/crucible/target/debug/deps/libcipher-fe5b83e4e2b73d72.rmeta --extern ctr=/work/oxidecomputer/crucible/target/debug/deps/libctr-583e100d27882194.rmeta --extern polyval=/work/oxidecomputer/crucible/target/debug/deps/libpolyval-e4b47d7c1f31f998.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --extern zeroize=/work/oxidecomputer/crucible/target/debug/deps/libzeroize-8e99cf4813483d58.rmeta --cap-lints allow`
750 Compiling async-recursion v1.0.5
751 Running `rustc --crate-name async_recursion --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-recursion-1.0.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=ce9499495a1cb858 -C extra-filename=-ce9499495a1cb858 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
752 Compiling ringbuffer v0.14.2
753 Running `rustc --crate-name ringbuffer --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ringbuffer-0.14.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' -C metadata=e91d75e4694e6351 -C extra-filename=-e91d75e4694e6351 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
754 Compiling unicode-segmentation v1.10.0
755 Running `rustc --crate-name unicode_segmentation --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-segmentation-1.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=06176721b7b95955 -C extra-filename=-06176721b7b95955 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
756 Compiling console v0.15.5
757 Running `rustc --crate-name console --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/console-0.15.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="ansi-parsing"' --cfg 'feature="default"' --cfg 'feature="unicode-width"' -C metadata=4236472a6e29ce0a -C extra-filename=-4236472a6e29ce0a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
758 Compiling opentelemetry_api v0.18.0
759 Running `rustc --crate-name opentelemetry_api --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_api-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="fnv"' --cfg 'feature="metrics"' --cfg 'feature="pin-project-lite"' --cfg 'feature="trace"' -C metadata=54be0f0e21b05b9c -C extra-filename=-54be0f0e21b05b9c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
760 Compiling dashmap v5.2.0
761 Running `rustc --crate-name dashmap --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dashmap-5.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="send_guard"' -C metadata=e6d3c88fce252f4a -C extra-filename=-e6d3c88fce252f4a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --cap-lints allow`
762 Compiling unicase v2.6.0
763 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicase-2.6.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=a2e7dddab165438a -C extra-filename=-a2e7dddab165438a --out-dir /work/oxidecomputer/crucible/target/debug/build/unicase-a2e7dddab165438a -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib --cap-lints allow`
764 Compiling newline-converter v0.3.0
765 Running `rustc --crate-name newline_converter --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/newline-converter-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=54456d7e7e0ff19b -C extra-filename=-54456d7e7e0ff19b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_segmentation=/work/oxidecomputer/crucible/target/debug/deps/libunicode_segmentation-06176721b7b95955.rmeta --cap-lints allow`
766 Compiling num-derive v0.4.0
767 Running `rustc --crate-name num_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-derive-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=814c8a0a0a713cba -C extra-filename=-814c8a0a0a713cba --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
768 Running `/work/oxidecomputer/crucible/target/debug/build/unicase-a2e7dddab165438a/build-script-build`
769 Compiling similar v2.2.1
770 Running `rustc --crate-name similar --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/similar-2.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="text"' -C metadata=c5a411995c7e1b53 -C extra-filename=-c5a411995c7e1b53 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
771 Compiling typify v0.0.13 (https://github.com/oxidecomputer/typify#92bfed8b)
772 Running `rustc --crate-name typify --edition=2021 /home/build/.cargo/git/checkouts/typify-288d5a84bbbe6a46/92bfed8/typify/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="macro"' --cfg 'feature="typify-macro"' -C metadata=60072fbcea665837 -C extra-filename=-60072fbcea665837 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern typify_impl=/work/oxidecomputer/crucible/target/debug/deps/libtypify_impl-5385296f2ea50467.rmeta --extern typify_macro=/work/oxidecomputer/crucible/target/debug/deps/libtypify_macro-f2da351574d5abd0.so --cap-lints allow`
773 Compiling opentelemetry_sdk v0.18.0
774 Running `rustc --crate-name opentelemetry_sdk --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_sdk-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="async-trait"' --cfg 'feature="crossbeam-channel"' --cfg 'feature="dashmap"' --cfg 'feature="default"' --cfg 'feature="fnv"' --cfg 'feature="metrics"' --cfg 'feature="percent-encoding"' --cfg 'feature="rand"' --cfg 'feature="trace"' -C metadata=f1be95b27d4b99f6 -C extra-filename=-f1be95b27d4b99f6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern dashmap=/work/oxidecomputer/crucible/target/debug/deps/libdashmap-e6d3c88fce252f4a.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-54be0f0e21b05b9c.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern 
thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
775 Compiling convert_case v0.5.0
776 Running `rustc --crate-name convert_case --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/convert_case-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=1b3c1b1b6d16787f -C extra-filename=-1b3c1b1b6d16787f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
777 Compiling progenitor-impl v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
778 Running `rustc --crate-name progenitor_impl --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor-impl/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4eeb9d4c0f08c454 -C extra-filename=-4eeb9d4c0f08c454 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getopts=/work/oxidecomputer/crucible/target/debug/deps/libgetopts-93a8419d37acce69.rmeta --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern typify=/work/oxidecomputer/crucible/target/debug/deps/libtypify-60072fbcea665837.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow`
779 Compiling openapi-lint v0.1.0 (https://github.com/oxidecomputer/openapi-lint?branch=main#9e3ada82)
780 Running `rustc --crate-name openapi_lint --edition=2018 /home/build/.cargo/git/checkouts/openapi-lint-42a90d71bc44c8c6/9e3ada8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=848bb99097a9a843 -C extra-filename=-848bb99097a9a843 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern convert_case=/work/oxidecomputer/crucible/target/debug/deps/libconvert_case-1b3c1b1b6d16787f.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rmeta --cap-lints allow`
781 Compiling expectorate v1.0.7
782 Running `rustc --crate-name expectorate --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/expectorate-1.0.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4e45b262baa473cc -C extra-filename=-4e45b262baa473cc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern console=/work/oxidecomputer/crucible/target/debug/deps/libconsole-4236472a6e29ce0a.rmeta --extern newline_converter=/work/oxidecomputer/crucible/target/debug/deps/libnewline_converter-54456d7e7e0ff19b.rmeta --extern similar=/work/oxidecomputer/crucible/target/debug/deps/libsimilar-c5a411995c7e1b53.rmeta --cap-lints allow`
783 Running `rustc --crate-name unicase /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicase-2.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=60e4f37ecb23afe1 -C extra-filename=-60e4f37ecb23afe1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg __unicase__iter_cmp --cfg __unicase__default_hasher --cfg __unicase__const_fns --cfg __unicase__core_and_alloc`
784 Compiling opentelemetry v0.18.0
785 Running `rustc --crate-name opentelemetry --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="metrics"' --cfg 'feature="trace"' -C metadata=f41372bdd269a545 -C extra-filename=-f41372bdd269a545 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-54be0f0e21b05b9c.rmeta --extern opentelemetry_sdk=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_sdk-f1be95b27d4b99f6.rmeta --cap-lints allow`
786 Compiling overload v0.1.1
787 Running `rustc --crate-name overload --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/overload-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c7048b1d029106d7 -C extra-filename=-c7048b1d029106d7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
788 Compiling urlencoding v2.1.2
789 Compiling nu-ansi-term v0.46.0
790 Running `rustc --crate-name urlencoding --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/urlencoding-2.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=76c058d20df73e40 -C extra-filename=-76c058d20df73e40 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
791 Running `rustc --crate-name nu_ansi_term --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nu-ansi-term-0.46.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=91641c4d652bb213 -C extra-filename=-91641c4d652bb213 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern overload=/work/oxidecomputer/crucible/target/debug/deps/liboverload-c7048b1d029106d7.rmeta --cap-lints allow`
792 Compiling crucible-smf v0.0.0 (/work/oxidecomputer/crucible/smf)
793 Running `rustc --crate-name crucible_smf --edition=2021 smf/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e1a82c6f17385dc6 -C extra-filename=-e1a82c6f17385dc6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta`
794 Compiling opentelemetry_api v0.20.0
795 Compiling mime_guess v2.0.4
796 Running `rustc --crate-name opentelemetry_api --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_api-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="pin-project-lite"' --cfg 'feature="trace"' -C metadata=04e099a48329a859 -C extra-filename=-04e099a48329a859 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern urlencoding=/work/oxidecomputer/crucible/target/debug/deps/liburlencoding-76c058d20df73e40.rmeta --cap-lints allow`
797 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mime_guess-2.0.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="rev-mappings"' -C metadata=dbd0371f2a220632 -C extra-filename=-dbd0371f2a220632 --out-dir /work/oxidecomputer/crucible/target/debug/build/mime_guess-dbd0371f2a220632 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicase=/work/oxidecomputer/crucible/target/debug/deps/libunicase-60e4f37ecb23afe1.rlib --cap-lints allow`
798 Compiling tracing-log v0.1.3
799 Running `rustc --crate-name tracing_log --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-log-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="log-tracer"' --cfg 'feature="std"' -C metadata=d77683210f5d223d -C extra-filename=-d77683210f5d223d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --cap-lints allow`
800 Compiling sharded-slab v0.1.4
801 Running `rustc --crate-name sharded_slab --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sharded-slab-0.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=9ba6a384880b5495 -C extra-filename=-9ba6a384880b5495 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --cap-lints allow`
802 Compiling ordered-float v1.1.1
803 Running `rustc --crate-name ordered_float /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ordered-float-1.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=b7d47a63e5afa582 -C extra-filename=-b7d47a63e5afa582 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow`
804 Compiling ordered-float v3.7.0
805 Running `rustc --crate-name ordered_float --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ordered-float-3.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=c435708391037060 -C extra-filename=-c435708391037060 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow`
806 Compiling threadpool v1.8.1
807 Running `rustc --crate-name threadpool /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/threadpool-1.8.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e4e99e14eaa48b3e -C extra-filename=-e4e99e14eaa48b3e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --cap-lints allow`
808 Compiling signal-hook v0.3.17
809 Compiling integer-encoding v3.0.3
810 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-0.3.17/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="channel"' --cfg 'feature="default"' --cfg 'feature="iterator"' -C metadata=dc2729fde466a024 -C extra-filename=-dc2729fde466a024 --out-dir /work/oxidecomputer/crucible/target/debug/build/signal-hook-dc2729fde466a024 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
811 Running `rustc --crate-name integer_encoding --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/integer-encoding-3.0.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=d6f198d882d03cc4 -C extra-filename=-d6f198d882d03cc4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
812 Running `/work/oxidecomputer/crucible/target/debug/build/signal-hook-dc2729fde466a024/build-script-build`
813 Compiling opentelemetry_sdk v0.20.0
814 Running `rustc --crate-name opentelemetry_sdk --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_sdk-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="async-trait"' --cfg 'feature="crossbeam-channel"' --cfg 'feature="default"' --cfg 'feature="percent-encoding"' --cfg 'feature="rand"' --cfg 'feature="trace"' -C metadata=6fef3b11a1793dc3 -C extra-filename=-6fef3b11a1793dc3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-04e099a48329a859.rmeta --extern ordered_float=/work/oxidecomputer/crucible/target/debug/deps/libordered_float-c435708391037060.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
815 Compiling thrift v0.16.0
816 Running `rustc --crate-name thrift --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thrift-0.16.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="server"' --cfg 'feature="threadpool"' -C metadata=1fac1fb5888ad036 -C extra-filename=-1fac1fb5888ad036 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern integer_encoding=/work/oxidecomputer/crucible/target/debug/deps/libinteger_encoding-d6f198d882d03cc4.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern ordered_float=/work/oxidecomputer/crucible/target/debug/deps/libordered_float-b7d47a63e5afa582.rmeta --extern threadpool=/work/oxidecomputer/crucible/target/debug/deps/libthreadpool-e4e99e14eaa48b3e.rmeta --cap-lints allow`
817 Running `/work/oxidecomputer/crucible/target/debug/build/mime_guess-dbd0371f2a220632/build-script-build`
818 Compiling opentelemetry-semantic-conventions v0.10.0
819 Running `rustc --crate-name opentelemetry_semantic_conventions --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-semantic-conventions-0.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=18113e5418aec521 -C extra-filename=-18113e5418aec521 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-f41372bdd269a545.rmeta --cap-lints allow`
820 Compiling tracing-subscriber v0.3.17
821 Running `rustc --crate-name tracing_subscriber --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-subscriber-0.3.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="ansi"' --cfg 'feature="default"' --cfg 'feature="fmt"' --cfg 'feature="nu-ansi-term"' --cfg 'feature="registry"' --cfg 'feature="sharded-slab"' --cfg 'feature="smallvec"' --cfg 'feature="std"' --cfg 'feature="thread_local"' --cfg 'feature="tracing-log"' -C metadata=e39dae5ba339bc78 -C extra-filename=-e39dae5ba339bc78 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern nu_ansi_term=/work/oxidecomputer/crucible/target/debug/deps/libnu_ansi_term-91641c4d652bb213.rmeta --extern sharded_slab=/work/oxidecomputer/crucible/target/debug/deps/libsharded_slab-9ba6a384880b5495.rmeta --extern smallvec=/work/oxidecomputer/crucible/target/debug/deps/libsmallvec-397f26bd8c84e528.rmeta --extern thread_local=/work/oxidecomputer/crucible/target/debug/deps/libthread_local-771d112d9219dc9f.rmeta --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --extern tracing_log=/work/oxidecomputer/crucible/target/debug/deps/libtracing_log-d77683210f5d223d.rmeta --cap-lints allow`
822 Compiling phf_shared v0.11.1
823 Running `rustc --crate-name phf_shared --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_shared-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=f4c611c5b95ef91c -C extra-filename=-f4c611c5b95ef91c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern siphasher=/work/oxidecomputer/crucible/target/debug/deps/libsiphasher-2eb27360f66646f3.rmeta --cap-lints allow`
824 Compiling memoffset v0.6.5
825 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memoffset-0.6.5/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=98b281bc850662f7 -C extra-filename=-98b281bc850662f7 --out-dir /work/oxidecomputer/crucible/target/debug/build/memoffset-98b281bc850662f7 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
826 Running `rustc --crate-name mime_guess /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mime_guess-2.0.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="rev-mappings"' -C metadata=66974d6c31968dc2 -C extra-filename=-66974d6c31968dc2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern mime=/work/oxidecomputer/crucible/target/debug/deps/libmime-ac14a9115eddd3c2.rmeta --extern unicase=/work/oxidecomputer/crucible/target/debug/deps/libunicase-60e4f37ecb23afe1.rmeta --cap-lints allow`
827 Running `/work/oxidecomputer/crucible/target/debug/build/memoffset-98b281bc850662f7/build-script-build`
828 Compiling opentelemetry v0.20.0
829 Running `rustc --crate-name opentelemetry --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="trace"' -C metadata=5524fd7817ad57fb -C extra-filename=-5524fd7817ad57fb --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-04e099a48329a859.rmeta --extern opentelemetry_sdk=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_sdk-6fef3b11a1793dc3.rmeta --cap-lints allow`
830 Compiling opentelemetry-jaeger v0.17.0
831 Running `rustc --crate-name opentelemetry_jaeger --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-jaeger-0.17.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=9ebf47742e5e063f -C extra-filename=-9ebf47742e5e063f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-f41372bdd269a545.rmeta --extern opentelemetry_semantic_conventions=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_semantic_conventions-18113e5418aec521.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern thrift=/work/oxidecomputer/crucible/target/debug/deps/libthrift-1fac1fb5888ad036.rmeta --cap-lints allow`
832 Running `rustc --crate-name signal_hook --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-0.3.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="channel"' --cfg 'feature="default"' --cfg 'feature="iterator"' -C metadata=ae206b38743b6815 -C extra-filename=-ae206b38743b6815 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern signal_hook_registry=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_registry-4d955479f235827e.rmeta --cap-lints allow`
833 Compiling crossbeam-epoch v0.9.8
834 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-epoch-0.9.8/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="lazy_static"' --cfg 'feature="std"' -C metadata=d874a78423ef46ce -C extra-filename=-d874a78423ef46ce --out-dir /work/oxidecomputer/crucible/target/debug/build/crossbeam-epoch-d874a78423ef46ce -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
835 Compiling crucible-downstairs v0.0.1 (/work/oxidecomputer/crucible/downstairs)
836 Running `rustc --crate-name build_script_build --edition=2021 downstairs/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' -C metadata=66ed07e0d9dc333b -C extra-filename=-66ed07e0d9dc333b --out-dir /work/oxidecomputer/crucible/target/debug/build/crucible-downstairs-66ed07e0d9dc333b -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern version_check=/work/oxidecomputer/crucible/target/debug/deps/libversion_check-23142ed5bf6178a0.rlib`
837 Compiling tracing-opentelemetry v0.18.0
838 Running `rustc --crate-name tracing_opentelemetry --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-opentelemetry-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="metrics"' --cfg 'feature="tracing-log"' -C metadata=8f8cfc1900c3a663 -C extra-filename=-8f8cfc1900c3a663 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-f41372bdd269a545.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --extern tracing_log=/work/oxidecomputer/crucible/target/debug/deps/libtracing_log-d77683210f5d223d.rmeta --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rmeta --cap-lints allow`
839 Compiling http-range v0.1.5
840 Running `rustc --crate-name http_range --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/http-range-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4885ae91fe1ea75f -C extra-filename=-4885ae91fe1ea75f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
841 Running `/work/oxidecomputer/crucible/target/debug/build/crucible-downstairs-66ed07e0d9dc333b/build-script-build`
842 Compiling hyper-staticfile v0.9.4
843 Running `rustc --crate-name hyper_staticfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-staticfile-0.9.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=559b4389ef952563 -C extra-filename=-559b4389ef952563 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern http_range=/work/oxidecomputer/crucible/target/debug/deps/libhttp_range-4885ae91fe1ea75f.rmeta --extern httpdate=/work/oxidecomputer/crucible/target/debug/deps/libhttpdate-e3ef82a990113a54.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern url=/work/oxidecomputer/crucible/target/debug/deps/liburl-ff56943ab9066fdc.rmeta --cap-lints allow`
844 Running `/work/oxidecomputer/crucible/target/debug/build/crossbeam-epoch-d874a78423ef46ce/build-script-build`
845 Running `rustc --crate-name memoffset /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memoffset-0.6.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=508ecbf92309dc73 -C extra-filename=-508ecbf92309dc73 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg tuple_ty --cfg allow_clippy --cfg maybe_uninit --cfg doctests --cfg raw_ref_macros`
846 Compiling phf_generator v0.11.1
847 Running `rustc --crate-name phf_generator --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_generator-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=41d436cb237437bf -C extra-filename=-41d436cb237437bf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-f4c611c5b95ef91c.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --cap-lints allow`
848 Compiling io-lifetimes v0.5.3
849 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/io-lifetimes-0.5.3/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=eca1b29fac9a0fbe -C extra-filename=-eca1b29fac9a0fbe --out-dir /work/oxidecomputer/crucible/target/debug/build/io-lifetimes-eca1b29fac9a0fbe -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
850 Compiling rayon-core v1.9.2
851 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rayon-core-1.9.2/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=b3156e65197adcc4 -C extra-filename=-b3156e65197adcc4 --out-dir /work/oxidecomputer/crucible/target/debug/build/rayon-core-b3156e65197adcc4 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
852 Running `/work/oxidecomputer/crucible/target/debug/build/rayon-core-b3156e65197adcc4/build-script-build`
853 Compiling phf_codegen v0.11.1
854 Running `rustc --crate-name phf_codegen --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_codegen-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=f8063558873b33a1 -C extra-filename=-f8063558873b33a1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_generator=/work/oxidecomputer/crucible/target/debug/deps/libphf_generator-41d436cb237437bf.rmeta --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-f4c611c5b95ef91c.rmeta --cap-lints allow`
855 Running `/work/oxidecomputer/crucible/target/debug/build/io-lifetimes-eca1b29fac9a0fbe/build-script-build`
856 Running `rustc --crate-name crossbeam_epoch --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-epoch-0.9.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="lazy_static"' --cfg 'feature="std"' -C metadata=44fc1cc817fcdd23 -C extra-filename=-44fc1cc817fcdd23 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern memoffset=/work/oxidecomputer/crucible/target/debug/deps/libmemoffset-508ecbf92309dc73.rmeta --extern scopeguard=/work/oxidecomputer/crucible/target/debug/deps/libscopeguard-00b7ece4eb7b8e7e.rmeta --cap-lints allow --cfg crossbeam_const_fn_trait_bound`
857 Compiling rustix v0.33.4
858 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.33.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="io-lifetimes"' --cfg 'feature="std"' -C metadata=4d7937b0898004d1 -C extra-filename=-4d7937b0898004d1 --out-dir /work/oxidecomputer/crucible/target/debug/build/rustix-4d7937b0898004d1 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
859 Compiling crc32fast v1.3.2
860 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crc32fast-1.3.2/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=5d7fac4f45a97417 -C extra-filename=-5d7fac4f45a97417 --out-dir /work/oxidecomputer/crucible/target/debug/build/crc32fast-5d7fac4f45a97417 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
861 Compiling terminfo v0.8.0
862 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/terminfo-0.8.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=bed6d7dcce5eac66 -C extra-filename=-bed6d7dcce5eac66 --out-dir /work/oxidecomputer/crucible/target/debug/build/terminfo-bed6d7dcce5eac66 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_codegen=/work/oxidecomputer/crucible/target/debug/deps/libphf_codegen-f8063558873b33a1.rlib --cap-lints allow`
863 Running `/work/oxidecomputer/crucible/target/debug/build/rustix-4d7937b0898004d1/build-script-build`
864 Running `/work/oxidecomputer/crucible/target/debug/build/crc32fast-5d7fac4f45a97417/build-script-build`
865 Compiling crossbeam-deque v0.8.1
866 Running `rustc --crate-name crossbeam_deque --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-deque-0.8.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="crossbeam-epoch"' --cfg 'feature="crossbeam-utils"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=46df68c0a92e69f5 -C extra-filename=-46df68c0a92e69f5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern crossbeam_epoch=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_epoch-44fc1cc817fcdd23.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --cap-lints allow`
867 Running `rustc --crate-name io_lifetimes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/io-lifetimes-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=59f01ff60b1f438f -C extra-filename=-59f01ff60b1f438f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
868 Compiling signal-hook-mio v0.2.3
869 Running `rustc --crate-name signal_hook_mio --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-mio-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="mio-0_8"' --cfg 'feature="support-v0_8"' -C metadata=b78bc232ff08be19 -C extra-filename=-b78bc232ff08be19 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio_0_8=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --cap-lints allow`
870 Compiling structmeta-derive v0.2.0
871 Running `rustc --crate-name structmeta_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-derive-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=6868cfc37bbab5f5 -C extra-filename=-6868cfc37bbab5f5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
872 Compiling vte_generate_state_changes v0.1.1
873 Running `rustc --crate-name vte_generate_state_changes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vte_generate_state_changes-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=52ea4d6c5be5fc21 -C extra-filename=-52ea4d6c5be5fc21 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern proc_macro --cap-lints allow`
874 Compiling subprocess v0.2.9
875 Running `rustc --crate-name subprocess --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/subprocess-0.2.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0acfc5c9b903588a -C extra-filename=-0acfc5c9b903588a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
876 Compiling dirs-sys v0.3.7
877 Running `rustc --crate-name dirs_sys /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-sys-0.3.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=15239ab4fd89ac89 -C extra-filename=-15239ab4fd89ac89 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
878 Compiling wait-timeout v0.2.0
879 Running `rustc --crate-name wait_timeout /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/wait-timeout-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=509e2becba2710d2 -C extra-filename=-509e2becba2710d2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
880 Compiling rayon v1.5.2
881 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rayon-1.5.2/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=03d683edffe821df -C extra-filename=-03d683edffe821df --out-dir /work/oxidecomputer/crucible/target/debug/build/rayon-03d683edffe821df -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern autocfg=/work/oxidecomputer/crucible/target/debug/deps/libautocfg-b1cec8cc882d2fdd.rlib --cap-lints allow`
882 Compiling portable-atomic v1.4.1
883 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/portable-atomic-1.4.1/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="fallback"' -C metadata=89385d5be32ea8bc -C extra-filename=-89385d5be32ea8bc --out-dir /work/oxidecomputer/crucible/target/debug/build/portable-atomic-89385d5be32ea8bc -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
884 Compiling progenitor-macro v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
885 Running `rustc --crate-name progenitor_macro --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor-macro/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=4c13a8353939c841 -C extra-filename=-4c13a8353939c841 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern progenitor_impl=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_impl-4eeb9d4c0f08c454.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-2a83027836bfd0fb.rlib --extern serde_yaml=/work/oxidecomputer/crucible/target/debug/deps/libserde_yaml-c7db3f53bbf8134b.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
886 Compiling bit-vec v0.6.3
887 Running `rustc --crate-name bit_vec /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bit-vec-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=35546b1439b435ea -C extra-filename=-35546b1439b435ea --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
888 Compiling adler v1.0.2
889 Running `rustc --crate-name adler /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/adler-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=de7aee9f30a32373 -C extra-filename=-de7aee9f30a32373 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
890 Compiling regex-automata v0.1.10
891 Running `rustc --crate-name regex_automata /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-automata-0.1.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=054d4c6c4feb4142 -C extra-filename=-054d4c6c4feb4142 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
892 Compiling minimal-lexical v0.2.1
893 Running `rustc --crate-name minimal_lexical --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/minimal-lexical-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=fec5e2c45ccf032b -C extra-filename=-fec5e2c45ccf032b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
894 Compiling quick-error v1.2.3
895 Running `rustc --crate-name quick_error /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/quick-error-1.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c8c4727ecc8bb914 -C extra-filename=-c8c4727ecc8bb914 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
896 Compiling bit-set v0.5.3
897 Running `rustc --crate-name bit_set /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bit-set-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=ec12e43f0b47413c -C extra-filename=-ec12e43f0b47413c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bit_vec=/work/oxidecomputer/crucible/target/debug/deps/libbit_vec-35546b1439b435ea.rmeta --cap-lints allow`
898 Compiling bstr v0.2.17
899 Running `rustc --crate-name bstr --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bstr-0.2.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="lazy_static"' --cfg 'feature="regex-automata"' --cfg 'feature="std"' --cfg 'feature="unicode"' -C metadata=f0785d87dddebfcd -C extra-filename=-f0785d87dddebfcd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern regex_automata=/work/oxidecomputer/crucible/target/debug/deps/libregex_automata-054d4c6c4feb4142.rmeta --cap-lints allow`
900 Compiling nom v7.1.3
901 Running `rustc --crate-name nom --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nom-7.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=7d7f045245d4ae57 -C extra-filename=-7d7f045245d4ae57 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern minimal_lexical=/work/oxidecomputer/crucible/target/debug/deps/libminimal_lexical-fec5e2c45ccf032b.rmeta --cap-lints allow`
902 Running `/work/oxidecomputer/crucible/target/debug/build/portable-atomic-89385d5be32ea8bc/build-script-build`
903 Compiling rusty-fork v0.3.0
904 Running `rustc --crate-name rusty_fork --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rusty-fork-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="timeout"' --cfg 'feature="wait-timeout"' -C metadata=20bbf553eecf6baa -C extra-filename=-20bbf553eecf6baa --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern quick_error=/work/oxidecomputer/crucible/target/debug/deps/libquick_error-c8c4727ecc8bb914.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern wait_timeout=/work/oxidecomputer/crucible/target/debug/deps/libwait_timeout-509e2becba2710d2.rmeta --cap-lints allow`
905 Compiling miniz_oxide v0.6.2
906 Running `rustc --crate-name miniz_oxide --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/miniz_oxide-0.6.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="with-alloc"' -C metadata=f7bade0e2b1a9da4 -C extra-filename=-f7bade0e2b1a9da4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern adler=/work/oxidecomputer/crucible/target/debug/deps/libadler-de7aee9f30a32373.rmeta --cap-lints allow`
907 Compiling vte v0.11.1
908 Running `rustc --crate-name vte --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vte-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=66f6ba7b673381f9 -C extra-filename=-66f6ba7b673381f9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern utf8parse=/work/oxidecomputer/crucible/target/debug/deps/libutf8parse-7be01c1e08c14495.rmeta --extern vte_generate_state_changes=/work/oxidecomputer/crucible/target/debug/deps/libvte_generate_state_changes-52ea4d6c5be5fc21.so --cap-lints allow`
909 Running `/work/oxidecomputer/crucible/target/debug/build/rayon-03d683edffe821df/build-script-build`
910 Running `/work/oxidecomputer/crucible/target/debug/build/terminfo-bed6d7dcce5eac66/build-script-build`
911 Compiling dirs v4.0.0
912 Running `rustc --crate-name dirs /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-4.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=b6e400c266a3b937 -C extra-filename=-b6e400c266a3b937 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dirs_sys=/work/oxidecomputer/crucible/target/debug/deps/libdirs_sys-15239ab4fd89ac89.rmeta --cap-lints allow`
913 Running `rustc --crate-name rayon_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rayon-core-1.9.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=000ce3790ebea6a1 -C extra-filename=-000ce3790ebea6a1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern crossbeam_deque=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_deque-46df68c0a92e69f5.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --cap-lints allow`
914 Running `rustc --crate-name rustix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.33.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="io-lifetimes"' --cfg 'feature="std"' -C metadata=f2ea1fb682f42c58 -C extra-filename=-f2ea1fb682f42c58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern errno=/work/oxidecomputer/crucible/target/debug/deps/liberrno-af3769d2acbbbf20.rmeta --extern io_lifetimes=/work/oxidecomputer/crucible/target/debug/deps/libio_lifetimes-59f01ff60b1f438f.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg libc`
915 Compiling structmeta v0.2.0
916 Running `rustc --crate-name structmeta --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=3c927247a3e97db4 -C extra-filename=-3c927247a3e97db4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern structmeta_derive=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta_derive-6868cfc37bbab5f5.so --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --cap-lints allow`
917 Running `rustc --crate-name crc32fast /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crc32fast-1.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=bbced466bd6966b3 -C extra-filename=-bbced466bd6966b3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow --cfg crc32fast_stdarchx86`
918 Compiling phf v0.11.1
919 Running `rustc --crate-name phf --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=26f57b9bc310a574 -C extra-filename=-26f57b9bc310a574 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-f4c611c5b95ef91c.rmeta --cap-lints allow`
920 Compiling tokio-stream v0.1.8
921 Running `rustc --crate-name tokio_stream --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-stream-0.1.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="time"' -C metadata=4c3a15ae8dcea990 -C extra-filename=-4c3a15ae8dcea990 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
922 Compiling rand_xorshift v0.3.0
923 Running `rustc --crate-name rand_xorshift --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_xorshift-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=a099defc127d0ae4 -C extra-filename=-a099defc127d0ae4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --cap-lints allow`
924 Compiling csv-core v0.1.10
925 Running `rustc --crate-name csv_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/csv-core-0.1.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=6a47c452f1df15e2 -C extra-filename=-6a47c452f1df15e2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --cap-lints allow`
926 Compiling xattr v0.2.2
927 Running `rustc --crate-name xattr /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/xattr-0.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="unsupported"' -C metadata=70407947526a7a4b -C extra-filename=-70407947526a7a4b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
928 Compiling filetime v0.2.17
929 Running `rustc --crate-name filetime --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/filetime-0.2.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=337368c6d4c995d8 -C extra-filename=-337368c6d4c995d8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
930 Compiling same-file v1.0.6
931 Running `rustc --crate-name same_file --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/same-file-1.0.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=b7a6da1044557b8b -C extra-filename=-b7a6da1044557b8b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
932 Compiling unarray v0.1.4
933 Running `rustc --crate-name unarray --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unarray-0.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bb988de2629d2530 -C extra-filename=-bb988de2629d2530 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
934 Compiling walkdir v2.3.2
935 Running `rustc --crate-name walkdir --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/walkdir-2.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5232f739d2ba1b5e -C extra-filename=-5232f739d2ba1b5e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern same_file=/work/oxidecomputer/crucible/target/debug/deps/libsame_file-b7a6da1044557b8b.rmeta --cap-lints allow`
936 Compiling tokio-test v0.4.2
937 Running `rustc --crate-name tokio_test --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-test-0.4.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=12a28be646ff63e6 -C extra-filename=-12a28be646ff63e6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_stream=/work/oxidecomputer/crucible/target/debug/deps/libasync_stream-0486f21173e73f9c.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_stream=/work/oxidecomputer/crucible/target/debug/deps/libtokio_stream-4c3a15ae8dcea990.rmeta --cap-lints allow`
938 Compiling proptest v1.2.0
939 Running `rustc --crate-name proptest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proptest-1.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bit-set"' --cfg 'feature="break-dead-code"' --cfg 'feature="default"' --cfg 'feature="fork"' --cfg 'feature="lazy_static"' --cfg 'feature="regex-syntax"' --cfg 'feature="rusty-fork"' --cfg 'feature="std"' --cfg 'feature="tempfile"' --cfg 'feature="timeout"' -C metadata=327f7f2cf6858f27 -C extra-filename=-327f7f2cf6858f27 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bit_set=/work/oxidecomputer/crucible/target/debug/deps/libbit_set-ec12e43f0b47413c.rmeta --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rmeta --extern rand_xorshift=/work/oxidecomputer/crucible/target/debug/deps/librand_xorshift-a099defc127d0ae4.rmeta --extern regex_syntax=/work/oxidecomputer/crucible/target/debug/deps/libregex_syntax-b9593ef3338880de.rmeta --extern rusty_fork=/work/oxidecomputer/crucible/target/debug/deps/librusty_fork-20bbf553eecf6baa.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern unarray=/work/oxidecomputer/crucible/target/debug/deps/libunarray-bb988de2629d2530.rmeta --cap-lints allow`
940 Compiling tar v0.4.38
941 Running `rustc --crate-name tar --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tar-0.4.38/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="xattr"' -C metadata=b33bc6012d78be3d -C extra-filename=-b33bc6012d78be3d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern filetime=/work/oxidecomputer/crucible/target/debug/deps/libfiletime-337368c6d4c995d8.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern xattr=/work/oxidecomputer/crucible/target/debug/deps/libxattr-70407947526a7a4b.rmeta --cap-lints allow`
942 Compiling fd-lock v3.0.4
943 Running `rustc --crate-name fd_lock --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fd-lock-3.0.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=dd6f5c85295045f7 -C extra-filename=-dd6f5c85295045f7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern rustix=/work/oxidecomputer/crucible/target/debug/deps/librustix-f2ea1fb682f42c58.rmeta --cap-lints allow`
944 Running `rustc --crate-name terminfo --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/terminfo-0.8.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=cd93b6cd14f79089 -C extra-filename=-cd93b6cd14f79089 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dirs=/work/oxidecomputer/crucible/target/debug/deps/libdirs-b6e400c266a3b937.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern nom=/work/oxidecomputer/crucible/target/debug/deps/libnom-7d7f045245d4ae57.rmeta --extern phf=/work/oxidecomputer/crucible/target/debug/deps/libphf-26f57b9bc310a574.rmeta --cap-lints allow`
945 Compiling csv v1.2.2
946 Running `rustc --crate-name csv --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/csv-1.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=187f0e890389cec3 -C extra-filename=-187f0e890389cec3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern csv_core=/work/oxidecomputer/crucible/target/debug/deps/libcsv_core-6a47c452f1df15e2.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
947 Running `rustc --crate-name rayon --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rayon-1.5.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f2d40ba22c8b185e -C extra-filename=-f2d40ba22c8b185e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crossbeam_deque=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_deque-46df68c0a92e69f5.rmeta --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --extern rayon_core=/work/oxidecomputer/crucible/target/debug/deps/librayon_core-000ce3790ebea6a1.rmeta --cap-lints allow --cfg has_step_by_rev --cfg has_min_const_generics --cfg has_control_flow`
948 Compiling flate2 v1.0.25
949 Running `rustc --crate-name flate2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/flate2-1.0.25/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="miniz_oxide"' --cfg 'feature="rust_backend"' -C metadata=e91a1b496d4e6ad4 -C extra-filename=-e91a1b496d4e6ad4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crc32fast=/work/oxidecomputer/crucible/target/debug/deps/libcrc32fast-bbced466bd6966b3.rmeta --extern miniz_oxide=/work/oxidecomputer/crucible/target/debug/deps/libminiz_oxide-f7bade0e2b1a9da4.rmeta --cap-lints allow`
950 Compiling test-strategy v0.3.1
951 Running `rustc --crate-name test_strategy --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/test-strategy-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=5eb6b90d55d9f739 -C extra-filename=-5eb6b90d55d9f739 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern structmeta=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta-3c927247a3e97db4.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
952 Compiling strip-ansi-escapes v0.2.0
953 Running `rustc --crate-name strip_ansi_escapes /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strip-ansi-escapes-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8090d3de2e6bf9be -C extra-filename=-8090d3de2e6bf9be --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern vte=/work/oxidecomputer/crucible/target/debug/deps/libvte-66f6ba7b673381f9.rmeta --cap-lints allow`
954 Compiling httptest v0.15.4
955 Running `rustc --crate-name httptest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httptest-0.15.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=174da737d96e2af6 -C extra-filename=-174da737d96e2af6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bstr=/work/oxidecomputer/crucible/target/debug/deps/libbstr-f0785d87dddebfcd.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
956 Running `rustc --crate-name portable_atomic --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/portable-atomic-1.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="fallback"' -C metadata=f0a1a94e9d6381ba -C extra-filename=-f0a1a94e9d6381ba --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg portable_atomic_llvm_16`
957 Compiling crossterm v0.26.1
958 Running `rustc --crate-name crossterm --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossterm-0.26.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bracketed-paste"' --cfg 'feature="default"' --cfg 'feature="serde"' -C metadata=ddbacbe0f657f0ff -C extra-filename=-ddbacbe0f657f0ff --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --extern signal_hook_mio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_mio-b78bc232ff08be19.rmeta --cap-lints allow`
959 Compiling signal-hook-tokio v0.3.1
960 Running `rustc --crate-name signal_hook_tokio --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-tokio-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="futures-core-0_3"' --cfg 'feature="futures-v0_3"' -C metadata=6a6b104c61918fa0 -C extra-filename=-6a6b104c61918fa0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core_0_3=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
961 Compiling itertools v0.10.5
962 Running `rustc --crate-name itertools --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/itertools-0.10.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_alloc"' --cfg 'feature="use_std"' -C metadata=09aeacd112427d42 -C extra-filename=-09aeacd112427d42 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --cap-lints allow`
963 Compiling which v4.4.0
964 Running `rustc --crate-name which --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/which-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=23fb4550fe083323 -C extra-filename=-23fb4550fe083323 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
965 Compiling number_prefix v0.4.0
966 Running `rustc --crate-name number_prefix /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/number_prefix-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=be3728a6ec19cda2 -C extra-filename=-be3728a6ec19cda2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
967 Compiling nu-ansi-term v0.49.0
968 Running `rustc --crate-name nu_ansi_term --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nu-ansi-term-0.49.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=cdbbf2d007fd1e63 -C extra-filename=-cdbbf2d007fd1e63 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
969 Compiling utf8-width v0.1.6
970 Running `rustc --crate-name utf8_width --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/utf8-width-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7e583d5482ac364b -C extra-filename=-7e583d5482ac364b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
971 Compiling indicatif v0.17.6
972 Running `rustc --crate-name indicatif --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indicatif-0.17.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="rayon"' --cfg 'feature="unicode-width"' -C metadata=297a26a70875006e -C extra-filename=-297a26a70875006e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern console=/work/oxidecomputer/crucible/target/debug/deps/libconsole-4236472a6e29ce0a.rmeta --extern number_prefix=/work/oxidecomputer/crucible/target/debug/deps/libnumber_prefix-be3728a6ec19cda2.rmeta --extern portable_atomic=/work/oxidecomputer/crucible/target/debug/deps/libportable_atomic-f0a1a94e9d6381ba.rmeta --extern rayon=/work/oxidecomputer/crucible/target/debug/deps/librayon-f2d40ba22c8b185e.rmeta --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
973 Compiling byte-unit v4.0.19
974 Running `rustc --crate-name byte_unit --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/byte-unit-4.0.19/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' --cfg 'feature="u128"' -C metadata=02cb17c857e20dac -C extra-filename=-02cb17c857e20dac --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern utf8_width=/work/oxidecomputer/crucible/target/debug/deps/libutf8_width-7e583d5482ac364b.rmeta --cap-lints allow`
975 Compiling crossterm v0.27.0
976 Running `rustc --crate-name crossterm --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossterm-0.27.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bracketed-paste"' --cfg 'feature="default"' --cfg 'feature="events"' --cfg 'feature="windows"' -C metadata=3c787fd4c4d4bc45 -C extra-filename=-3c787fd4c4d4bc45 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-109244799287a8c3.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --extern signal_hook_mio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_mio-b78bc232ff08be19.rmeta --cap-lints allow`
977 Compiling reedline v0.23.0
978 Running `rustc --crate-name reedline --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/reedline-0.23.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6e6244e0f6aa654d -C extra-filename=-6e6244e0f6aa654d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern crossterm=/work/oxidecomputer/crucible/target/debug/deps/libcrossterm-ddbacbe0f657f0ff.rmeta --extern fd_lock=/work/oxidecomputer/crucible/target/debug/deps/libfd_lock-dd6f5c85295045f7.rmeta --extern itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-09aeacd112427d42.rmeta --extern nu_ansi_term=/work/oxidecomputer/crucible/target/debug/deps/libnu_ansi_term-cdbbf2d007fd1e63.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern strip_ansi_escapes=/work/oxidecomputer/crucible/target/debug/deps/libstrip_ansi_escapes-8090d3de2e6bf9be.rmeta --extern strum=/work/oxidecomputer/crucible/target/debug/deps/libstrum-59ea3c6704348e58.rmeta --extern strum_macros=/work/oxidecomputer/crucible/target/debug/deps/libstrum_macros-bc907f623478289d.so --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern unicode_segmentation=/work/oxidecomputer/crucible/target/debug/deps/libunicode_segmentation-06176721b7b95955.rmeta --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
979 Compiling nbd v0.2.3
980 Running `rustc --crate-name nbd /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nbd-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6a0c01a24def5e9a -C extra-filename=-6a0c01a24def5e9a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --cap-lints allow`
981 Compiling crucible-integration-tests v0.1.0 (/work/oxidecomputer/crucible/integration_tests)
982 Running `rustc --crate-name crucible_integration_tests --edition=2021 integration_tests/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=521d4724b4b30c4a -C extra-filename=-521d4724b4b30c4a --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps`
983 Running `rustc --crate-name info --edition=2021 smf/examples/info.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 -C metadata=434b2d59a81079b5 -C extra-filename=-434b2d59a81079b5 --out-dir /work/oxidecomputer/crucible/target/debug/examples -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib`
984 Compiling clearscreen v2.0.1
985 Running `rustc --crate-name clearscreen --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clearscreen-2.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5e923be7ef236a41 -C extra-filename=-5e923be7ef236a41 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rmeta --extern terminfo=/work/oxidecomputer/crucible/target/debug/deps/libterminfo-cd93b6cd14f79089.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern which=/work/oxidecomputer/crucible/target/debug/deps/libwhich-23fb4550fe083323.rmeta --cap-lints allow`
986 Running `rustc --crate-name crucible_smf --edition=2021 smf/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=6114df38a9482a0c -C extra-filename=-6114df38a9482a0c --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib`
987 Running `rustc --crate-name crucible_client_types --edition=2021 crucible-client-types/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=5ba9f9d411803900 -C extra-filename=-5ba9f9d411803900 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib`
988 Running `rustc --crate-name ring --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ring-0.16.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="dev_urandom_fallback"' --cfg 'feature="once_cell"' -C metadata=76ccf829b8b489e1 -C extra-filename=-76ccf829b8b489e1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern spin=/work/oxidecomputer/crucible/target/debug/deps/libspin-bfb6115ad3135235.rmeta --extern untrusted=/work/oxidecomputer/crucible/target/debug/deps/libuntrusted-4b93784238d33e58.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -l static=ring-core -l static=ring-test`
989 Running `/work/oxidecomputer/crucible/target/debug/build/rustls-0c105edc866f624d/build-script-build`
990 Compiling sct v0.7.0
991 Running `rustc --crate-name sct --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sct-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=78bb43c10db32a31 -C extra-filename=-78bb43c10db32a31 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern untrusted=/work/oxidecomputer/crucible/target/debug/deps/libuntrusted-4b93784238d33e58.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
992 Compiling rustls-webpki v0.101.4
993 Running `rustc --crate-name webpki --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-webpki-0.101.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=34d764dbf1af1e62 -C extra-filename=-34d764dbf1af1e62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern untrusted=/work/oxidecomputer/crucible/target/debug/deps/libuntrusted-4b93784238d33e58.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
994 Running `rustc --crate-name rustls --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-0.21.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="dangerous_configuration"' --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="logging"' --cfg 'feature="tls12"' -C metadata=3df6867cfa5c4a0a -C extra-filename=-3df6867cfa5c4a0a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern webpki=/work/oxidecomputer/crucible/target/debug/deps/libwebpki-34d764dbf1af1e62.rmeta --extern sct=/work/oxidecomputer/crucible/target/debug/deps/libsct-78bb43c10db32a31.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
995 Running `rustc --crate-name libgit2_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libgit2-sys-0.15.2+1.6.4/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=8cfb4e998561bba5 -C extra-filename=-8cfb4e998561bba5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern libz_sys=/work/oxidecomputer/crucible/target/debug/deps/liblibz_sys-a3111f279c2174e3.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -l static=git2 -L native=/usr/lib/amd64 --cfg libgit2_vendored`
996 Compiling git2 v0.17.2
997 Running `rustc --crate-name git2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/git2-0.17.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=c3276e37b1dd24e5 -C extra-filename=-c3276e37b1dd24e5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern libgit2_sys=/work/oxidecomputer/crucible/target/debug/deps/liblibgit2_sys-8cfb4e998561bba5.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern url=/work/oxidecomputer/crucible/target/debug/deps/liburl-ff56943ab9066fdc.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/usr/lib/amd64`
998 Compiling tokio-rustls v0.24.1
999 Running `rustc --crate-name tokio_rustls --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-rustls-0.24.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="logging"' --cfg 'feature="tls12"' -C metadata=eafe4ab74a176b7d -C extra-filename=-eafe4ab74a176b7d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1000 Compiling hyper-rustls v0.24.0
1001 Running `rustc --crate-name hyper_rustls --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-rustls-0.24.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=9a83fdcd4675665e -C extra-filename=-9a83fdcd4675665e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1002 Running `rustc --crate-name dropshot --edition=2018 /home/build/.cargo/git/checkouts/dropshot-a4a923d29dccc492/aca6de3/dropshot/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="usdt"' --cfg 'feature="usdt-probes"' -C metadata=a49a4505c9c6b86f -C extra-filename=-a49a4505c9c6b86f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_stream=/work/oxidecomputer/crucible/target/debug/deps/libasync_stream-0486f21173e73f9c.rmeta --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern camino=/work/oxidecomputer/crucible/target/debug/deps/libcamino-45f0f4a2c258f934.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern debug_ignore=/work/oxidecomputer/crucible/target/debug/deps/libdebug_ignore-2303f500fcbc7093.rmeta --extern dropshot_endpoint=/work/oxidecomputer/crucible/target/debug/deps/libdropshot_endpoint-1ff3a3dd0352c250.so --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern hostname=/work/oxidecomputer/crucible/target/debug/deps/libhostname-4a0f8b1a56e5681a.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern 
openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rmeta --extern paste=/work/oxidecomputer/crucible/target/debug/deps/libpaste-251489637fc3d2bc.so --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_path_to_error=/work/oxidecomputer/crucible/target/debug/deps/libserde_path_to_error-f8fffad8b554a310.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --extern sha1=/work/oxidecomputer/crucible/target/debug/deps/libsha1-8fdbd7a715f3bef1.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rmeta --extern slog_json=/work/oxidecomputer/crucible/target/debug/deps/libslog_json-d8408f8f3a6dd5b7.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern 
toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --extern waitgroup=/work/oxidecomputer/crucible/target/debug/deps/libwaitgroup-db859ead02bd709e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1003 Compiling reqwest v0.11.20
1004 Running `rustc --crate-name reqwest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/reqwest-0.11.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="__rustls"' --cfg 'feature="__tls"' --cfg 'feature="blocking"' --cfg 'feature="default"' --cfg 'feature="default-tls"' --cfg 'feature="hyper-rustls"' --cfg 'feature="hyper-tls"' --cfg 'feature="json"' --cfg 'feature="native-tls-crate"' --cfg 'feature="rustls"' --cfg 'feature="rustls-pemfile"' --cfg 'feature="rustls-tls"' --cfg 'feature="rustls-tls-webpki-roots"' --cfg 'feature="serde_json"' --cfg 'feature="stream"' --cfg 'feature="tokio-native-tls"' --cfg 'feature="tokio-rustls"' --cfg 'feature="tokio-util"' --cfg 'feature="wasm-streams"' --cfg 'feature="webpki-roots"' -C metadata=6407fc4e9374ca8e -C extra-filename=-6407fc4e9374ca8e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern encoding_rs=/work/oxidecomputer/crucible/target/debug/deps/libencoding_rs-3255048793b3f7a6.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern h2=/work/oxidecomputer/crucible/target/debug/deps/libh2-3e2d8390f23dd48a.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern http_body=/work/oxidecomputer/crucible/target/debug/deps/libhttp_body-bb1d69dd918c127f.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern 
hyper_rustls=/work/oxidecomputer/crucible/target/debug/deps/libhyper_rustls-9a83fdcd4675665e.rmeta --extern hyper_tls=/work/oxidecomputer/crucible/target/debug/deps/libhyper_tls-2dbf57f91f681e2c.rmeta --extern ipnet=/work/oxidecomputer/crucible/target/debug/deps/libipnet-8b250db103a32779.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern mime=/work/oxidecomputer/crucible/target/debug/deps/libmime-ac14a9115eddd3c2.rmeta --extern native_tls_crate=/work/oxidecomputer/crucible/target/debug/deps/libnative_tls-320c05ab5bbd33c9.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_native_tls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_native_tls-f56aba82a642e205.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern tower_service=/work/oxidecomputer/crucible/target/debug/deps/libtower_service-51da71f2ad5117ee.rmeta --extern 
url=/work/oxidecomputer/crucible/target/debug/deps/liburl-ff56943ab9066fdc.rmeta --extern webpki_roots=/work/oxidecomputer/crucible/target/debug/deps/libwebpki_roots-31272bd9a7615638.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1005 Compiling progenitor-client v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
1006 Compiling omicron-zone-package v0.9.1
1007 Running `rustc --crate-name progenitor_client --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=a49921826ff8ec2a -C extra-filename=-a49921826ff8ec2a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1008 Running `rustc --crate-name omicron_zone_package --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/omicron-zone-package-0.9.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8423a7bf8bd88040 -C extra-filename=-8423a7bf8bd88040 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern filetime=/work/oxidecomputer/crucible/target/debug/deps/libfiletime-337368c6d4c995d8.rmeta --extern flate2=/work/oxidecomputer/crucible/target/debug/deps/libflate2-e91a1b496d4e6ad4.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-8c1c5827befd93e7.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --extern tar=/work/oxidecomputer/crucible/target/debug/deps/libtar-b33bc6012d78be3d.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern 
tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern walkdir=/work/oxidecomputer/crucible/target/debug/deps/libwalkdir-5232f739d2ba1b5e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1009 Compiling progenitor v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
1010 Running `rustc --crate-name progenitor --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0319e0dfd841f493 -C extra-filename=-0319e0dfd841f493 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern progenitor_client=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_client-a49921826ff8ec2a.rmeta --extern progenitor_impl=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_impl-4eeb9d4c0f08c454.rmeta --extern progenitor_macro=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_macro-4c13a8353939c841.so --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1011 Compiling repair-client v0.0.1 (/work/oxidecomputer/crucible/repair-client)
1012 Running `rustc --crate-name repair_client --edition=2021 repair-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=1452d56087b6ccb7 -C extra-filename=-1452d56087b6ccb7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1013 Compiling crucible-pantry-client v0.0.1 (/work/oxidecomputer/crucible/pantry-client)
1014 Running `rustc --crate-name crucible_pantry_client --edition=2021 pantry-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ccb9ddeebb23cea2 -C extra-filename=-ccb9ddeebb23cea2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1015 Compiling dsc-client v0.0.1 (/work/oxidecomputer/crucible/dsc-client)
1016 Running `rustc --crate-name dsc_client --edition=2021 dsc-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=15b0c81fa833cf0f -C extra-filename=-15b0c81fa833cf0f --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1017 Running `rustc --crate-name vergen --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vergen-8.2.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="cargo"' --cfg 'feature="default"' --cfg 'feature="git"' --cfg 'feature="git2"' --cfg 'feature="git2-rs"' --cfg 'feature="rustc"' --cfg 'feature="rustc_version"' --cfg 'feature="time"' -C metadata=e75c33287bd2547c -C extra-filename=-e75c33287bd2547c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern git2_rs=/work/oxidecomputer/crucible/target/debug/deps/libgit2-c3276e37b1dd24e5.rmeta --extern rustc_version=/work/oxidecomputer/crucible/target/debug/deps/librustc_version-201ef6100eba532b.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/usr/lib/amd64 --cfg stable --cfg msrv`
1018 Compiling omicron-common v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
1019 Running `rustc --crate-name omicron_common --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/common/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=52df1fff8b36d94c -C extra-filename=-52df1fff8b36d94c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern api_identity=/work/oxidecomputer/crucible/target/debug/deps/libapi_identity-90d45ecc06c8f773.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern backoff=/work/oxidecomputer/crucible/target/debug/deps/libbackoff-2bc4a2fd075cf434.rmeta --extern camino=/work/oxidecomputer/crucible/target/debug/deps/libcamino-45f0f4a2c258f934.rmeta --extern camino_tempfile=/work/oxidecomputer/crucible/target/debug/deps/libcamino_tempfile-38b6a8d85c9dc0c0.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern ipnetwork=/work/oxidecomputer/crucible/target/debug/deps/libipnetwork-0e9e550a49db2c52.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern macaddr=/work/oxidecomputer/crucible/target/debug/deps/libmacaddr-98e89df75c36be48.rmeta --extern 
parse_display=/work/oxidecomputer/crucible/target/debug/deps/libparse_display-34a1a5d52375b70b.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-8c1c5827befd93e7.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --extern serde_human_bytes=/work/oxidecomputer/crucible/target/debug/deps/libserde_human_bytes-7f54d0fcbf9b36f3.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_with=/work/oxidecomputer/crucible/target/debug/deps/libserde_with-4f9ddd30b380d6cf.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern steno=/work/oxidecomputer/crucible/target/debug/deps/libsteno-d1d3ce30296926ad.rmeta --extern strum=/work/oxidecomputer/crucible/target/debug/deps/libstrum-59ea3c6704348e58.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_postgres=/work/oxidecomputer/crucible/target/debug/deps/libtokio_postgres-5628b93feb58339b.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow 
-L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1020 Compiling crucible-control-client v0.0.1 (/work/oxidecomputer/crucible/control-client)
1021 Running `rustc --crate-name crucible_control_client --edition=2021 control-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3d0142c7d3790e17 -C extra-filename=-3d0142c7d3790e17 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1022 Compiling crucible-agent-client v0.0.1 (/work/oxidecomputer/crucible/agent-client)
1023 Running `rustc --crate-name crucible_agent_client --edition=2021 agent-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=86e1c18945d61be3 -C extra-filename=-86e1c18945d61be3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1024 Compiling crucible-common v0.0.1 (/work/oxidecomputer/crucible/common)
1025 Running `rustc --crate-name build_script_build --edition=2021 common/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=e7c30bee09b7ef3b -C extra-filename=-e7c30bee09b7ef3b --out-dir /work/oxidecomputer/crucible/target/debug/build/crucible-common-e7c30bee09b7ef3b -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern vergen=/work/oxidecomputer/crucible/target/debug/deps/libvergen-e75c33287bd2547c.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/usr/lib/amd64`
1026 Running `/work/oxidecomputer/crucible/target/debug/build/crucible-common-e7c30bee09b7ef3b/build-script-build`
1027 Running `rustc --crate-name crucible_common --edition=2021 common/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c45901e9152d33f4 -C extra-filename=-c45901e9152d33f4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rmeta --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rmeta --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rmeta --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta 
--extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern twox_hash=/work/oxidecomputer/crucible/target/debug/deps/libtwox_hash-9f5dd4f7319ca539.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1028 Running `rustc --crate-name crucible_common --edition=2021 common/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=9a32809bdbdf85c4 -C extra-filename=-9a32809bdbdf85c4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib --extern 
tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern twox_hash=/work/oxidecomputer/crucible/target/debug/deps/libtwox_hash-9f5dd4f7319ca539.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1029 Compiling crucible-protocol v0.0.0 (/work/oxidecomputer/crucible/protocol)
1030 Running `rustc --crate-name crucible_protocol --edition=2021 protocol/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0682e169c907a102 -C extra-filename=-0682e169c907a102 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1031 Running `rustc --crate-name crucible_protocol --edition=2021 protocol/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=c776b78ce4b42bf6 -C extra-filename=-c776b78ce4b42bf6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1032 Compiling dsc v0.1.0 (/work/oxidecomputer/crucible/dsc)
1033 Running `rustc --crate-name dsc_client --edition=2021 dsc-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=f8a5b497695371e1 -C extra-filename=-f8a5b497695371e1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1034 Running `rustc --crate-name repair_client --edition=2021 repair-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=5353c8de97b4615f -C extra-filename=-5353c8de97b4615f --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1035 Running `rustc --crate-name dsc --edition=2021 dsc/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=2b80980cbc3bac2c -C extra-filename=-2b80980cbc3bac2c --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern byte_unit=/work/oxidecomputer/crucible/target/debug/deps/libbyte_unit-02cb17c857e20dac.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern csv=/work/oxidecomputer/crucible/target/debug/deps/libcsv-187f0e890389cec3.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern dsc_client=/work/oxidecomputer/crucible/target/debug/deps/libdsc_client-15b0c81fa833cf0f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern 
tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1036 Running `rustc --crate-name crucible_agent_client --edition=2021 agent-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=6f3900e8033b57ec -C extra-filename=-6f3900e8033b57ec --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1037 Compiling crucible-package v0.1.0 (/work/oxidecomputer/crucible/package)
1038 Running `rustc --crate-name crucible_package --edition=2021 package/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=e8ff0170d25e0da5 -C extra-filename=-e8ff0170d25e0da5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern omicron_zone_package=/work/oxidecomputer/crucible/target/debug/deps/libomicron_zone_package-8423a7bf8bd88040.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1039 Compiling oximeter v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
1040 Running `rustc --crate-name oximeter --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/oximeter/oximeter/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=90ae047d6b643e4e -C extra-filename=-90ae047d6b643e4e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern oximeter_macro_impl=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_macro_impl-e4cc949eda20c416.so --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1041 Compiling nexus-client v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
1042 Running `rustc --crate-name nexus_client --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/nexus-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fd6034b144d15fe8 -C extra-filename=-fd6034b144d15fe8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern ipnetwork=/work/oxidecomputer/crucible/target/debug/deps/libipnetwork-0e9e550a49db2c52.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern omicron_passwords=/work/oxidecomputer/crucible/target/debug/deps/libomicron_passwords-ac6e3a602e6ad041.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern regress=/work/oxidecomputer/crucible/target/debug/deps/libregress-10da65958da1c830.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1043 Running `rustc --crate-name crucible_control_client --edition=2021 control-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=d0a58354872d46d9 -C extra-filename=-d0a58354872d46d9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1044 Running `rustc --crate-name crucible_pantry_client --edition=2021 pantry-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8a27f01eb086219e -C extra-filename=-8a27f01eb086219e --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1045 Running `rustc --crate-name crucible_protocol --edition=2021 protocol/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=d81e6562be2ffe77 -C extra-filename=-d81e6562be2ffe77 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
1046 Compiling crucible-agent v0.0.1 (/work/oxidecomputer/crucible/agent)
1047 Running `rustc --crate-name crucible_agent --edition=2021 agent/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=acbf118b39db417b -C extra-filename=-acbf118b39db417b --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern 
serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1048 Compiling oximeter-producer v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
1049 Running `rustc --crate-name oximeter_producer --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/oximeter/producer/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5dc4f732e258486e -C extra-filename=-5dc4f732e258486e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern nexus_client=/work/oxidecomputer/crucible/target/debug/deps/libnexus_client-fd6034b144d15fe8.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1050 Running `rustc --crate-name crucible --edition=2021 upstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=41ca439abdc23695 -C extra-filename=-41ca439abdc23695 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aes_gcm_siv=/work/oxidecomputer/crucible/target/debug/deps/libaes_gcm_siv-21495b616a07c9a4.rmeta --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern async_recursion=/work/oxidecomputer/crucible/target/debug/deps/libasync_recursion-ce9499495a1cb858.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-b06e69badd72e55c.rmeta --extern 
libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rmeta --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern 
tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1051 Running `rustc --crate-name crucible --edition=2021 upstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=53d074fabbf363e8 -C extra-filename=-53d074fabbf363e8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aes_gcm_siv=/work/oxidecomputer/crucible/target/debug/deps/libaes_gcm_siv-21495b616a07c9a4.rlib --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern async_recursion=/work/oxidecomputer/crucible/target/debug/deps/libasync_recursion-ce9499495a1cb858.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern 
itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-b06e69badd72e55c.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern proptest=/work/oxidecomputer/crucible/target/debug/deps/libproptest-327f7f2cf6858f27.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern 
tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern test_strategy=/work/oxidecomputer/crucible/target/debug/deps/libtest_strategy-5eb6b90d55d9f739.so --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_test=/work/oxidecomputer/crucible/target/debug/deps/libtokio_test-12a28be646ff63e6.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1052 Compiling crucible-pantry v0.0.1 (/work/oxidecomputer/crucible/pantry)
1053 Running `rustc --crate-name crucible_pantry --edition=2021 pantry/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fe633af5059fe3a7 -C extra-filename=-fe633af5059fe3a7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rmeta --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern 
serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1054 Running `rustc --crate-name crucible_downstairs --edition=2021 downstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=6276be71be5284a4 -C extra-filename=-6276be71be5284a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rmeta --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern 
hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rmeta --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rmeta --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rmeta --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rmeta --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rmeta --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern 
slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rmeta --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1055 Compiling crucible-hammer v0.1.0 (/work/oxidecomputer/crucible/hammer)
1056 Compiling cmon v0.1.0 (/work/oxidecomputer/crucible/cmon)
1057 Compiling measure-iops v0.0.1 (/work/oxidecomputer/crucible/measure_iops)
1058 Running `rustc --crate-name crucible_pantry --edition=2021 pantry/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=b51bc30f7a0cbfa5 -C extra-filename=-b51bc30f7a0cbfa5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern 
openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1059 Running `rustc --crate-name crucible_hammer --edition=2021 hammer/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=9622fb9be260fb45 -C extra-filename=-9622fb9be260fb45 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1060 Running `rustc --crate-name measure_iops --edition=2021 measure_iops/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=cbdca99bf515defe -C extra-filename=-cbdca99bf515defe --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1061 Running `rustc --crate-name crucible_downstairs --edition=2021 downstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test --cfg 'feature="default"' -C metadata=dce67baac661a5f4 -C extra-filename=-dce67baac661a5f4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern 
hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern 
serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1062 Running `rustc --crate-name cmon --edition=2021 cmon/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=2a9909624d24c98d -C extra-filename=-2a9909624d24c98d --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern clearscreen=/work/oxidecomputer/crucible/target/debug/deps/libclearscreen-5e923be7ef236a41.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_control_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_control_client-3d0142c7d3790e17.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1063 Running `rustc --crate-name crucible_pantry --edition=2021 pantry/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8e1bf492bfe90e8c -C extra-filename=-8e1bf492bfe90e8c --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern 
openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1064 Compiling crudd v0.1.0 (/work/oxidecomputer/crucible/crudd)
1065 Running `rustc --crate-name crudd --edition=2021 crudd/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=3e9b00990c25260e -C extra-filename=-3e9b00990c25260e --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rlib --extern signal_hook_tokio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_tokio-6a6b104c61918fa0.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib 
--extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1066 Compiling crucible-nbd-server v0.1.0 (/work/oxidecomputer/crucible/nbd_server)
1067 Running `rustc --crate-name crucible_nbd_server --edition=2021 nbd_server/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8c1612631a1669fd -C extra-filename=-8c1612631a1669fd --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern nbd=/work/oxidecomputer/crucible/target/debug/deps/libnbd-6a0c01a24def5e9a.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib -L native=/usr/lib/amd64 -L 
native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1068 Compiling crutest v0.1.0 (/work/oxidecomputer/crucible/crutest)
1069 Running `rustc --crate-name crutest --edition=2021 crutest/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=af78e92d646e2d06 -C extra-filename=-af78e92d646e2d06 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crossterm=/work/oxidecomputer/crucible/target/debug/deps/libcrossterm-3c787fd4c4d4bc45.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern csv=/work/oxidecomputer/crucible/target/debug/deps/libcsv-187f0e890389cec3.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern dsc_client=/work/oxidecomputer/crucible/target/debug/deps/libdsc_client-15b0c81fa833cf0f.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern indicatif=/work/oxidecomputer/crucible/target/debug/deps/libindicatif-297a26a70875006e.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern 
oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern reedline=/work/oxidecomputer/crucible/target/debug/deps/libreedline-6e6244e0f6aa654d.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rlib --extern signal_hook_tokio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_tokio-6a6b104c61918fa0.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1070 Running `rustc --crate-name crucible_integration_tests --edition=2021 integration_tests/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8902d603847d3610 -C extra-filename=-8902d603847d3610 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_pantry_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry_client-ccb9ddeebb23cea2.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern httptest=/work/oxidecomputer/crucible/target/debug/deps/libhttptest-174da737d96e2af6.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern 
serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1071 Running `rustc --crate-name crucible_downstairs --edition=2021 downstairs/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test --cfg 'feature="default"' -C metadata=3ed9735920c1592d -C extra-filename=-3ed9735920c1592d --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern 
http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern 
schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L 
native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
1072 Finished test [unoptimized + debuginfo] target(s) in 5m 36s
1073 Running `/work/oxidecomputer/crucible/target/debug/deps/cmon-2a9909624d24c98d --nocapture`
1074 
1075 running 0 tests
1076 
1077 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
1078 
1079 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible-53d074fabbf363e8 --nocapture`
1080 
1081 running 351 tests
1082 test block_req::test::test_blockreq_and_blockreqwaiter_err ... ok
1083 test block_req::test::test_blockreqwaiter_send ... ok
1084 test block_req::test::test_blockreq_and_blockreqwaiter ... ok
1085 {{{"msg""msg"::""Upstairs startsUpstairs starts""{,",v""v""msg":"Upstairs starts":,:"v":000,,""name"name:"":,"crucible""name"crucible":"crucible",,""levellevel""::,"level":303030"msg":"Upstairs starts","v":0,"name":"crucible","level":30,",,"time""time":":time"":"2023-09-22T23:14:58.571033243Z"2023-09-22T23:14:58.571099761Z"2023-09-22T23:14:58.570989567Z",",",hostname":""hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,"pid"pidpid""::"47594759:4759,"upstairs":,"1upstairs":1}
1086 }
1087 {{"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: "\"msg":"x86_64-unknown-illumos\",\nCrucible Version: BuildInfo { debug: true,\n\n version: \" opt_level: 0,0.0.1\n\"},"\n git_sha: \",,""v"ed48f294784d46ea7d4bb99336918b74358eca46\":upstairs0",:,"\nname1":" git_commit_timestamp: }crucible"
1088 ,\""level":2023-09-22T22:51:18.000000000Z\"30,\n{ git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n"msg": rustc_channel: \""stable\",\nCrucible Version: BuildInfo {\n rustc_host_triple: \" version: \"x86_64-unknown-illumos0.0.1\"\",\n,\n git_sha: \" rustc_commit_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n,"90c541806f23a127002de5b4038be731ba1458ca git_commit_timestamp: time\""\":2023-09-22T22:51:18.000000000Z\"",\n,\n git_branch: 2023-09-22T23:14:58.571766951Z\""main\" cargo_triple: \",,\n"x86_64-unknown-illumos\"hostname rustc_semver: "\":"1.70.0\",,ip-10-150-1-55.us-west-2.compute.internal"\n,"\npid rustc_channel: "\": debug: true,\n4759stable\", opt_level: 0,\n,"\n rustc_host_triple: upstairs}""\":1}x86_64-unknown-illumos
1089 \",,\n"v": rustc_commit_sha: \"0{,"name"90c541806f23a127002de5b4038be731ba1458ca"\":"msg,"\n:"crucible" cargo_triple: \",Upstairs <-> Downstairs Message Version: 4x86_64-unknown-illumos\"",\n,"" debug: true,v\n":level0 opt_level: 0,,\n"}name"""::,""30cruciblev"":,"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.571840911Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time",:""pid,":2023-09-22T23:14:58.571844237Z4759",""time,upstairs"":hostname1"}"
1090 :":"{ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.571839079Z","",msgpid""::""4759hostname":","Crucible stats registered with UUID: 28a12560-c07c-41d3-8d35-7e06b0c337aaupstairs"":1,ip-10-150-1-55.us-west-2.compute.internal"}"
1091 v":,0"{,pid"":"name":msg""crucible":,""4759level"Upstairs <-> Downstairs Message Version: 4:"30,,""vupstairs""::10,}"
1092 ,name"":time"":"crucible","level2023-09-22T23:14:58.571895162Z"":30,"hostname"{:"ip-10-150-1-55.us-west-2.compute.internal",""pidmsg":"":,4759"time",:""Upstairs <-> Downstairs Message Version: 4"upstairs2023-09-22T23:14:58.571911239Z"",",:"1v"hostname}"
1093 ::"0{,"name"ip-10-150-1-55.us-west-2.compute.internal""msg,"":pid""::"4759crucible",,"Crucible 28a12560-c07c-41d3-8d35-7e06b0c337aa has session id: 85a5640b-a5e5-4a5f-a0f6-5c5b8b866c68"upstairs",:"1v"}:
1094 0","{name":"level"cruciblemsg"":",""level"::Crucible stats registered with UUID: 04482c5c-f564-49f3-897c-51c4de58239a30"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.571966155Z","hostname":,"","time"ip-10-150-1-55.us-west-2.compute.internal:"","time"pid2023-09-22T23:14:58.571971701Z"":,4759":"hostname,"":"upstairs"2023-09-22T23:14:58.571967359Z":ip-10-150-1-55.us-west-2.compute.internal"1,,""}hostname
1095 pid"":":4759,"ip-10-150-1-55.us-west-2.compute.internal"upstairs,""pid"::47591}
1096 ,"upstairs":{1}
1097 "msg":"{Crucible 04482c5c-f564-49f3-897c-51c4de58239a has session id: a9998b98-45c5-45f8-8be8-f4cf49aa5402"",msg"":v"":0,"name":"crucible",Crucible stats registered with UUID: 85682a66-27fd-403c-bf22-d04b4ff66805""level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.572032802Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time,""pid":":47592023-09-22T23:14:58.572038155Z",",upstairs":"1hostname"}:"
1098 ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
10992023-09-22T23:14:58.572ZINFOcrucible: Crucible 85682a66-27fd-403c-bf22-d04b4ff66805 has session id: 498b136b-9c8b-4715-a815-2b4d0646e0b2 upstairs = 1
1100 ,"time":"2023-09-22T23:14:58.570990413Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
11012023-09-22T23:14:58.572ZINFOcrucible: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, } upstairs = 1
11022023-09-22T23:14:58.572ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 upstairs = 1
11032023-09-22T23:14:58.572ZINFOcrucible: Crucible stats registered with UUID: 5c1ab357-2b46-4e37-877a-5089e53cee0e upstairs = 1
11042023-09-22T23:14:58.572ZINFOcrucible: Crucible 5c1ab357-2b46-4e37-877a-5089e53cee0e has session id: 7f7beff6-5728-4471-a291-79266d94fadb upstairs = 1
11052023-09-22T23:14:58.572ZINFOcrucible: [0] connecting to 127.0.0.1:58766 looper = 0 upstairs = 1
1106 {"msg":"[0] connecting to 127.0.0.1:39040","v":0,"name":"crucible","level":30,"{time":"2023-09-22T23:14:58.57260439Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","[0] connecting to 127.0.0.1:36984"pid":4759,"v":,"0looper":","0name":""crucible",","upstairs"level"::130}
11072023-09-22T23:14:58.572ZINFOcrucible: [1] connecting to 127.0.0.1:62630 looper = 0 upstairs = 1
1108 ,"time":"2023-09-22T23:14:58.572670529Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"looper":"1","upstairs":1}
11092023-09-22T23:14:58.572ZINFOcrucible: [2] connecting to 127.0.0.1:41929 looper = 2 upstairs = 1
11102023-09-22T23:14:58.572ZINFOcrucible: up_listen starts task = up_listen upstairs = 1
11112023-09-22T23:14:58.572ZINFOcrucible: Wait for all three downstairs to come online upstairs = 1
11122023-09-22T23:14:58.573ZINFOcrucible: Flush timeout: 86400 upstairs = 1
11132023-09-22T23:14:58.573ZINFOcrucible: [0] connecting to 127.0.0.1:36921 looper = 0 upstairs = 1
11142023-09-22T23:14:58.573ZINFOcrucible: [1] connecting to 127.0.0.1:37306 looper = 1 upstairs = 1
11152023-09-22T23:14:58.573ZINFOcrucible: [1] connecting to 127.0.0.1:47390 looper = 1 upstairs = 1
11162023-09-22T23:14:58.573ZINFOcrucible: [2] connecting to 127.0.0.1:35000 looper = 2 upstairs = 1
1117 {"msg":"up_listen starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.573313456Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1{,"task":"up_listen"}
1118 {"msg":"Wait for all three downstairs to come online","v":0,"name":"crucible","level":30"msg":"[1] connecting to 127.0.0.1:46704",","time"v":":0,2023-09-22T23:14:58.573413634Z""name":","crucible"hostname":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1119 {,"time":""msg":"2023-09-22T23:14:58.573447929Z"Flush timeout: 86400",","hostnamev":":"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pid"level"::475930,"looper":"1","upstairs":1}
1120 {,"time"":msg":""[2] connecting to 127.0.0.1:611412023-09-22T23:14:58.573484133Z"",","v"hostname"::"0,"nameip-10-150-1-55.us-west-2.compute.internal"":,""pid":crucible4759",,""upstairs":level1":}30
1121 ,"time":"2023-09-22T23:14:58.573526064Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"looper":"2","upstairs":1}
11222023-09-22T23:14:58.573ZINFOcrucible: up_listen starts task = up_listen upstairs = 1
11232023-09-22T23:14:58.573ZINFOcrucible: Wait for all three downstairs to come online upstairs = 1
11242023-09-22T23:14:58.573ZINFOcrucible: Flush timeout: 86400 upstairs = 1
1125 {"msg":"[1] 28a12560-c07c-41d3-8d35-7e06b0c337aa looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.573697367Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759{,"looper":"1","upstairs":1"}
1126 msg":"{[0] 5c1ab357-2b46-4e37-877a-5089e53cee0e looper connected"","msg"v"::"0,"name":"[1] Proc runs for 127.0.0.1:46704 in state New"crucible",",v"":level0",:"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.573750561Z","hostname,":""time":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.573747831Z"",","pid"hostname"::"4759,"ip-10-150-1-55.us-west-2.compute.internal"upstairs,":"1pid":}
1127 4759,"looper":"0","upstairs":1}
1128 {"msg":"[0] Proc runs for 127.0.0.1:58766 in state New","v":{0,"name":"crucible"","msg":level":"30{[2] connecting to 127.0.0.1:55683","v":0,""name":"msg"crucible":,""level":30[0] 85682a66-27fd-403c-bf22-d04b4ff66805 looper connected","v":0,,""name":time":""crucible"2023-09-22T23:14:58.573822503Z,""level",:"30,hostname"":time"":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.573832561Z","pid,"":hostname4759":","upstairs":1ip-10-150-1-55.us-west-2.compute.internal"},
1129 ","pid"time"::"47592023-09-22T23:14:58.573848094Z,""looper",":"hostname2"":,""upstairs":1ip-10-150-1-55.us-west-2.compute.internal"},"
1130 pid":4759,"{looper":"0",""upstairsmsg""::"1up_listen starts"},
1131 "v":0,"name":"crucible"{,"level":30"msg":"[0] Proc runs for 127.0.0.1:39040 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.573916202Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":,"1time":","task":2023-09-22T23:14:58.573925761Z"","up_listen"hostname}"
1132 :"ip-10-150-1-55.us-west-2.compute.internal","{pid":4759","msg"upstairs:"":1}
1133 Wait for all three downstairs to come online","v":0,"{name":"crucible",""levelmsg"":":30[1] 85682a66-27fd-403c-bf22-d04b4ff66805 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.573977041Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":,"1time":"}
1134 2023-09-22T23:14:58.573985105Z","hostname":"{{ip-10-150-1-55.us-west-2.compute.internal"","msg"pid":"msg":"4759:"Flush timeout: 86400",","looperv[1] 5c1ab357-2b46-4e37-877a-5089e53cee0e looper connected""":":1,"","vupstairs0"":,"0:,"name1"name"::"}"cruciblecrucible"","
1135 ,level"":level":3030,"time":"2023-09-22T23:14:58.574056303Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","timepid":"4759:","upstairs":2023-09-22T23:14:58.574056445Z"1},
1136 "hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"":msg"4759:","looper":"1"[0] 04482c5c-f564-49f3-897c-51c4de58239a looper connected",","upstairs":v"1:0},
1137 "name":"crucible","level":30,"time":"2023-09-22T23:14:58.574115845Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"looper":"0","upstairs":1}
11382023-09-22T23:14:58.574ZINFOcrucible: [0] Proc runs for 127.0.0.1:36921 in state New upstairs = 1
11392023-09-22T23:14:58.574ZINFOcrucible: [1] 04482c5c-f564-49f3-897c-51c4de58239a looper connected looper = 1 upstairs = 1
11402023-09-22T23:14:58.574ZINFOcrucible: [1] Proc runs for 127.0.0.1:37306 in state New upstairs = 1
11412023-09-22T23:14:58.574ZINFOcrucible: [2] 04482c5c-f564-49f3-897c-51c4de58239a looper connected looper = 2 upstairs = 1
11422023-09-22T23:14:58.574ZINFOcrucible: [2] Proc runs for 127.0.0.1:55683 in state New upstairs = 1
1143 {"msg":"[2] 28a12560-c07c-41d3-8d35-7e06b0c337aa looper connected","v":0,"name":"crucible","level":30{"msg":"[1] Proc runs for 127.0.0.1:47390 in state New","v":,"0time",:""name":"2023-09-22T23:14:58.574424762Z"crucible",,""hostname":"level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"looper":"2","upstairs":1}The guest has requested activation
1144 ,"time":"2023-09-22T23:14:58.5744514Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
11452023-09-22T23:14:58.574ZINFOcrucible: [2] 5c1ab357-2b46-4e37-877a-5089e53cee0e looper connected looper = 2 upstairs = 1
11462023-09-22T23:14:58.574ZINFOcrucible: [2] Proc runs for 127.0.0.1:35000 in state New upstairs = 1
1147 The guest has requested activation
1148 {"msg":"[1] Proc runs for 127.0.0.1:62630 in state New","v":0,"name":"crucible","level":30The guest has requested activation
1149 ,"time":"2023-09-22T23:14:58.5746426Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1150 {"msg":"[2] 85682a66-27fd-403c-bf22-d04b4ff66805 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.574689154Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"
1151 {"{msg":""[2] Proc runs for 127.0.0.1:61141 in state New"msg":","v":0,"5c1ab357-2b46-4e37-877a-5089e53cee0e active request set"name":","crucible"v":,0","levelname":"":crucible"30,"level":30,,""time"time"::""2023-09-22T23:14:58.574758151Z2023-09-22T23:14:58.574759835Z"",","hostname":hostname":""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid"::47594759,,""upstairsupstairs""::11}
1152 }
1153 {"msg":"[0] 28a12560-c07c-41d3-8d35-7e06b0c337aa looper connected","v":0,"name":"crucible","level":30looper":"2","upstairs":1}
1154 ,"time":"{2023-09-22T23:14:58.574833972Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":[2] Proc runs for 127.0.0.1:41929 in state New"4759,"v,"":looper0",The guest has requested activation
1155 ":"name"0":","crucible"upstairs",:"1level":}
1156 30{"msg":"[0] Proc runs for 127.0.0.1:36984 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.574884193Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,","upstairs"time"::"1}
1157 2023-09-22T23:14:58.574898126Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":","upstairs":185682a66-27fd-403c-bf22-d04b4ff66805 active request set"}
1158 ,"v":0,"name{":"crucible",""msg"level":":3028a12560-c07c-41d3-8d35-7e06b0c337aa active request set","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.57494455Z","hostname",:""time":"ip-10-150-1-55.us-west-2.compute.internal",2023-09-22T23:14:58.574952288Z"","pid"hostname"::"4759,"ip-10-150-1-55.us-west-2.compute.internalupstairs":"1,"}pid
1159 ":4759,"upstairs":1}
11602023-09-22T23:14:58.575ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a active request set upstairs = 1
1161 {"msg":"negotiate packet HereIAm { version: 4, upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:14:58.575079275Z"negotiate packet HereIAm { version: 4, upstairs_id: 5c1ab357-2b46-4e37-877a-5089e53cee0e, session_id: fcec4285-eaa1-46a4-819c-725bdeb5d523, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }","hostname,"":v"":0,"ip-10-150-1-55.us-west-2.compute.internal"name,"":pid"":crucible"4759,","level":downstairs30":1}
1162 {"msg":","time":"2023-09-22T23:14:58.575112938Z","hostname":"negotiate packet HereIAm { version: 4, upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }"ip-10-150-1-55.us-west-2.compute.internal",,""pid"v"::47590,"name,"":"downstairs"crucible":,"1level":}30
1163 {"msg":","time":"2023-09-22T23:14:58.57515318Z","hostname":"negotiate packet HereIAm { version: 4, upstairs_id: 5c1ab357-2b46-4e37-877a-5089e53cee0e, session_id: fcec4285-eaa1-46a4-819c-725bdeb5d523, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }"ip-10-150-1-55.us-west-2.compute.internal",",v""pid"::04759,"name,"":"downstairs":crucible2"},"
1164 level":30,"time":"2023-09-22T23:14:58.575192243Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"downstairs":2}
11652023-09-22T23:14:58.575ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 5c1ab357-2b46-4e37-877a-5089e53cee0e, session_id: fcec4285-eaa1-46a4-819c-725bdeb5d523, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 3
11662023-09-22T23:14:58.575ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 3
11672023-09-22T23:14:58.575ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) New New New ds_transition to WaitActive upstairs = 1
11682023-09-22T23:14:58.575ZINFOcrucible: [0] Transition from New to WaitActive upstairs = 1
11692023-09-22T23:14:58.575ZINFOcrucible: [0] client is_active_req TRUE, promote! session cc12d06e-bd83-45fe-987f-779d8c6339ec upstairs = 1
11702023-09-22T23:14:58.575ZINFOcrucible: [1] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) WaitActive New New ds_transition to WaitActive upstairs = 1
11712023-09-22T23:14:58.575ZINFOcrucible: [1] Transition from New to WaitActive upstairs = 1
11722023-09-22T23:14:58.575ZINFOcrucible: [1] client is_active_req TRUE, promote! session cc12d06e-bd83-45fe-987f-779d8c6339ec upstairs = 1
11732023-09-22T23:14:58.575ZINFOcrucible: [2] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) WaitActive WaitActive New ds_transition to WaitActive upstairs = 1
11742023-09-22T23:14:58.575ZINFOcrucible: [2] Transition from New to WaitActive upstairs = 1
11752023-09-22T23:14:58.575ZINFOcrucible: [2] client is_active_req TRUE, promote! session cc12d06e-bd83-45fe-987f-779d8c6339ec upstairs = 1
11762023-09-22T23:14:58.575ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1 } downstairs = 1
11772023-09-22T23:14:58.575ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1 } downstairs = 2
11782023-09-22T23:14:58.575ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1 } downstairs = 3
11792023-09-22T23:14:58.575ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
11802023-09-22T23:14:58.575ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 2
11812023-09-22T23:14:58.576ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 3
11822023-09-22T23:14:58.576ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) New New New ds_transition to WaitActive upstairs = 1
1183 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"{2023-09-22T23:14:58.576124225Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1184 {"msg":"[0] client is_active_req TRUE, promote! session 7f1ba66d-4c7a-4444-a654-4b5bfde1244a","v":0,"name":"crucible","level":30"msg":","time":"[0] 5c1ab357-2b46-4e37-877a-5089e53cee0e (fcec4285-eaa1-46a4-819c-725bdeb5d523) New New New ds_transition to WaitActive"2023-09-22T23:14:58.576165198Z",",v"":hostname":"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pid"level:"4759:30,"upstairs":1}
1185 {"msg":"[1] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) WaitActive New New ds_transition to WaitActive","v":0,"name":","crucible"time",":"level":302023-09-22T23:14:58.576196202Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1186 ,"time":"2023-09-22T23:14:58.576222332Z","{hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg":","pid":4759[0] Transition from New to WaitActive",","upstairs":v"1:}0
1187 ,"name":"crucible","level{":30"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576265344Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1188 ,"time":"2023-09-22T23:14:58.576275563Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,[0] client is_active_req TRUE, promote! session fcec4285-eaa1-46a4-819c-725bdeb5d523"","upstairs"v"::10,}"
1189 name":"crucible","level":{30"msg":"[1] client is_active_req TRUE, promote! session 7f1ba66d-4c7a-4444-a654-4b5bfde1244a","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576319558Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1190 ,"time":"{2023-09-22T23:14:58.576332234Z",""hostnamemsg"":":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1[1] 5c1ab357-2b46-4e37-877a-5089e53cee0e (fcec4285-eaa1-46a4-819c-725bdeb5d523) WaitActive New New ds_transition to WaitActive"}
1191 ,"v":0,"name":{"crucible",""levelmsg":"30:"[2] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576376652Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1192 ,"time":"{2023-09-22T23:14:58.576388258Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal",[1] Transition from New to WaitActive""pid":,"4759v":,0","upstairs"name:"1:"}
1193 crucible","level":30{"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576430139Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1194 ,"time":"{2023-09-22T23:14:58.576440847Z",""hostnamemsg"":":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] client is_active_req TRUE, promote! session fcec4285-eaa1-46a4-819c-725bdeb5d523",","upstairs"v"::10,}"
1195 name":"crucible","level"{:30"msg":"[2] client is_active_req TRUE, promote! session 7f1ba66d-4c7a-4444-a654-4b5bfde1244a","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.5764846Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1196 ,"time":"{2023-09-22T23:14:58.576496819Z"","msg":hostname":""ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1[2] 5c1ab357-2b46-4e37-877a-5089e53cee0e (fcec4285-eaa1-46a4-819c-725bdeb5d523) WaitActive WaitActive New ds_transition to WaitActive}"
1197 ,"v":0,"name":"{crucible","level":30"msg":"negotiate packet PromoteToActive { upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1 }","v":0,"name":"crucible",",level"":time":30"2023-09-22T23:14:58.576541556Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1198 {,"time"":"msg":"2023-09-22T23:14:58.576558314Z",[2] Transition from New to WaitActive"","hostname":v"":0,"name"ip-10-150-1-55.us-west-2.compute.internal":","crucible"pid,"":level"4759:30,"downstairs":1}
1199 {,""time"msg":":"2023-09-22T23:14:58.576594265Z","hostname":"negotiate packet PromoteToActive { upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1 }ip-10-150-1-55.us-west-2.compute.internal"",",pid":"4759v":,"0upstairs,""name:"1:"}
1200 crucible","level":{30"msg":"[2] client is_active_req TRUE, promote! session fcec4285-eaa1-46a4-819c-725bdeb5d523","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576634135Z","hostname":",ip-10-150-1-55.us-west-2.compute.internal""time,":""pid":47592023-09-22T23:14:58.576645723Z",","hostname":"downstairs":2}ip-10-150-1-55.us-west-2.compute.internal"
1201 ,"pid":4759,"upstairs"{:1}
1202 "msg":"{"msg":"negotiate packet PromoteToActive { upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1 }","v":0,"name":"crucible"negotiate packet PromoteToActive { upstairs_id: 5c1ab357-2b46-4e37-877a-5089e53cee0e, session_id: fcec4285-eaa1-46a4-819c-725bdeb5d523, gen: 1 },"","level"v"::300,"name":"crucible","level":30,",time"":"time":"2023-09-22T23:14:58.57670099Z"2023-09-22T23:14:58.576704972Z",",hostname"":hostname"":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal"",pid""pid"::47594759,","downstairs":downstairs"1:}3
1203 }
1204 {"msg":"{"msg":"negotiate packet RegionInfoPlease","v":negotiate packet PromoteToActive { upstairs_id: 5c1ab357-2b46-4e37-877a-5089e53cee0e, session_id: fcec4285-eaa1-46a4-819c-725bdeb5d523, gen: 1 }0",",name"":"v":crucible0",","name"level"::30"crucible","level":30,"time":","2023-09-22T23:14:58.576768052Z"time":","hostname":2023-09-22T23:14:58.576770695Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,","pid":downstairs4759":1,"}
1205 downstairs":2}
1206 {"msg"{:""negotiate packet RegionInfoPlease"msg",":"v":0,"name":"crucible","level":30negotiate packet PromoteToActive { upstairs_id: 5c1ab357-2b46-4e37-877a-5089e53cee0e, session_id: fcec4285-eaa1-46a4-819c-725bdeb5d523, gen: 1 }","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576819735Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid,"":time4759":","downstairs":2023-09-22T23:14:58.576827899Z"2,"}
1207 hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"":msg4759":","downstairs":negotiate packet RegionInfoPlease"3,"}v"
1208 :0,"name":"{crucible","level":"30msg":"negotiate packet RegionInfoPlease","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.576874066Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time",:""downstairs":2023-09-22T23:14:58.57688217Z"3,"}
1209 hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759,""msg":downstairs"":1}
1210 [0] downstairs client at 127.0.0.1:36984 has UUID 8dfe8526-b15f-436d-8c37-dad574df0099","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.57692579Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
12112023-09-22T23:14:58.576ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 8dfe8526-b15f-436d-8c37-dad574df0099, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
12122023-09-22T23:14:58.577ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa WaitActive WaitActive WaitActive upstairs = 1
12132023-09-22T23:14:58.577ZINFOcrucible: [1] downstairs client at 127.0.0.1:46704 has UUID 62fdae0f-000a-4ad8-95c6-010549b306c9 upstairs = 1
12142023-09-22T23:14:58.577ZINFOcrucible: [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 62fdae0f-000a-4ad8-95c6-010549b306c9, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
12152023-09-22T23:14:58.577ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa WaitActive WaitActive WaitActive upstairs = 1
12162023-09-22T23:14:58.577ZINFOcrucible: [2] downstairs client at 127.0.0.1:61141 has UUID a3b7c7a7-d0a0-4a01-b35d-0e669d8733e5 upstairs = 1
12172023-09-22T23:14:58.577ZINFOcrucible: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: a3b7c7a7-d0a0-4a01-b35d-0e669d8733e5, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
12182023-09-22T23:14:58.577ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa WaitActive WaitActive WaitActive upstairs = 1
12192023-09-22T23:14:58.577ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
12202023-09-22T23:14:58.577ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 2
12212023-09-22T23:14:58.577ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 3
12222023-09-22T23:14:58.577ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) WaitActive WaitActive WaitActive ds_transition to WaitQuorum upstairs = 1
12232023-09-22T23:14:58.577ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum upstairs = 1
12242023-09-22T23:14:58.577ZWARNcrucible: [0] new RM replaced this: None upstairs = 1
12252023-09-22T23:14:58.577ZINFOcrucible: [0] Starts reconcile loop upstairs = 1
12262023-09-22T23:14:58.577ZINFOcrucible: [1] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum upstairs = 1
12272023-09-22T23:14:58.577ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum upstairs = 1
12282023-09-22T23:14:58.577ZWARNcrucible: [1] new RM replaced this: None upstairs = 1
12292023-09-22T23:14:58.577ZINFOcrucible: [1] Starts reconcile loop upstairs = 1
12302023-09-22T23:14:58.577ZINFOcrucible: [2] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum upstairs = 1
12312023-09-22T23:14:58.577ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum upstairs = 1
12322023-09-22T23:14:58.577ZWARNcrucible: [2] new RM replaced this: None upstairs = 1
12332023-09-22T23:14:58.577ZINFOcrucible: [2] Starts reconcile loop upstairs = 1
1234 {"msg":"[0] 127.0.0.1:36984 task reports connection:true","v":0,"name":"crucible","level":30,"The guest has finished waiting for activation
1235 time":"2023-09-22T23:14:58.577966987Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
12362023-09-22T23:14:58.578ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa WaitQuorum WaitQuorum WaitQuorum upstairs = 1
12372023-09-22T23:14:58.578ZINFOcrucible: [0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] upstairs = 1
12382023-09-22T23:14:58.578ZINFOcrucible: [0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] upstairs = 1
12392023-09-22T23:14:58.578ZINFOcrucible: [0]R dirty: [false, false, false, false, false, false, false, false, false, false] upstairs = 1
12402023-09-22T23:14:58.578ZINFOcrucible: [1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] upstairs = 1
1241 {{"msg":""[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]",msg"":v":"0,"name":"negotiate packet RegionInfoPlease"crucible",","level"v"::300,"name":"crucible","level":30{","msgtime":"":"2023-09-22T23:14:58.578253727Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":,"1negotiate packet HereIAm { version: 4, upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }"time"}:
1242 ","v":2023-09-22T23:14:58.578259533Z0"{,","name":""msg"crucible":",hostname"":"level":[1]R dirty: [false, false, false, false, false, false, false, false, false, false]"30ip-10-150-1-55.us-west-2.compute.internal,"","v"pid"::04759,"name":",crucible"","downstairs"level"::130}
1243 ,"time":"2023-09-22T23:14:58.57830414Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","negotiate packet RegionInfoPlease"pid",,":time4759""v,:""":0downstairs,""2023-09-22T23:14:58.578318907Zname":"1:,}"
1244 "hostname":"crucible","{level"ip-10-150-1-55.us-west-2.compute.internal":"30msg":,""pid":4759,"upstairs":1}
1245 negotiate packet HereIAm { version: 4, upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }","v":0,"{name,"":"time":""msg":"2023-09-22T23:14:58.578366552Z"crucible",,"[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]hostname"":",""vlevel"":ip-10-150-1-55.us-west-2.compute.internal"30,:"0pid":,"4759name":","crucible"downstairs",":level":230},"
1246 time":"2023-09-22T23:14:58.578400272Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":negotiate packet RegionInfoPlease4759",",downstairs":,""2v"}time":
1247 ":0,"2023-09-22T23:14:58.578411249Z"name{",:"""hostname"msg":"crucible":","ip-10-150-1-55.us-west-2.compute.internal"level",":pid"30:4759,"upstairs":negotiate packet HereIAm { version: 4, upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }"1,"}v
1248 ":0,"name,""{:"time"crucible:"""msg,""2023-09-22T23:14:58.578460689Z"level":,:"30"hostname":"[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":ip-10-150-1-55.us-west-2.compute.internal0",,""pid"name"::"4759crucible",,""level"downstairs:,""30:time"3:"}
1249 2023-09-22T23:14:58.578487337Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pid":msg"4759:",","time":"downstairs":[0] downstairs client at 127.0.0.1:36921 has UUID d057f0d7-1fef-421b-b789-cb2746bfdb262023-09-22T23:14:58.578504702Z"3",}"
1250 v",:"0hostname,{"":"name":""crucibleip-10-150-1-55.us-west-2.compute.internal"msg"",",:"pid"":level"4759:30,[0] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) New New New ds_transition to WaitActive"",upstairs"":v"1:}0
1251 ,"name":","crucible"time":{","level":2023-09-22T23:14:58.578550382Z""msg"30,:""hostname"{:"[2]R dirty: [false, false, false, false, false, false, false, false, false, false]"ip-10-150-1-55.us-west-2.compute.internal",","msgv""pid"":::,""04759negotiate packet RegionInfoPleasetime"",,:""v",2023-09-22T23:14:58.578571948Z"":,0","name"upstairs"hostname""name:"":"::"1ip-10-150-1-55.us-west-2.compute.internalcrucible"",",pid"crucible":"4759level",:"}upstairs30"
1252 ,:"1level{}"
1253 ":msg30",{:"""msgtime""::""2023-09-22T23:14:58.578621037Z[0] Transition from New to WaitActive"",,""v":hostname"0:","name":"crucibleip-10-150-1-55.us-west-2.compute.internal"",,""levelpid"[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: d057f0d7-1fef-421b-b789-cb2746bfdb26, encrypted: false, database_read_version: 1, database_write_version: 1 }""::304759,"v",":downstairs0",:",2,"}"name":"time
1254 ":"time"crucible{2023-09-22T23:14:58.578649287Z:""msg,"":"hostname"":negotiate packet RegionInfoPlease""",",vip-10-150-1-55.us-west-2.compute.internal"":,""0pid,"":name4759"2023-09-22T23:14:58.578635805Z"level",,:"30":"upstairscrucible"":,1"}hostname"level
1255 "::{30""msg":"ip-10-150-1-55.us-west-2.compute.internal",,""pid":[0] client is_active_req TRUE, promote! session 5d828379-bf79-455f-9270-e82849f9ecba"4759time":,"",,""vtime""::0",upstairs""2023-09-22T23:14:58.578703489Z:2023-09-22T23:14:58.578693337Z"1name"",:""hostname"crucible:""},
1256 ,""ip-10-150-1-55.us-west-2.compute.internalhostname""level,"":pid30{:"":4759,""ip-10-150-1-55.us-west-2.compute.internal"downstairs"msg,:,3"}time
1257 "":{"pid"""2023-09-22T23:14:58.578740164Zmsg"::",""4759hostname:"":"Max found gen is 1",","upstairs"v":ip-10-150-1-55.us-west-2.compute.internal[0] downstairs client at 127.0.0.1:58766 has UUID f460c951-f672-4bce-b7d4-32f439e7d15b:"",,""v1"pid}:"0
1258 ,:"4759name":","crucibleupstairs"",:"1level{0"}:
1259 30",msg""{name"::"""crucible,msg""time:"""04482c5c-f564-49f3-897c-51c4de58239a WaitActive WaitActive WaitActive",",level"":v":[1] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) WaitActive New New ds_transition to WaitActive:"300,"","2023-09-22T23:14:58.578797044Zv"",:"0name":"hostname,"":name"":"crucible"ip-10-150-1-55.us-west-2.compute.internalcrucible"",,""pid"level:"4759:,30,""level":upstairs",":301time},
1260 "":"{time":"2023-09-22T23:14:58.578819025Z"2023-09-22T23:14:58.578835342Z""msg",:""hostname",,":""time":hostname"ip-10-150-1-55.us-west-2.compute.internal":",""[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: f460c951-f672-4bce-b7d4-32f439e7d15b, encrypted: false, database_read_version: 1, database_write_version: 1 }pid"",:"4759v",:"0upstairs,"":2023-09-22T23:14:58.578842192Z1name"}:
1261 ""crucible{",",msg""level:"":ip-10-150-1-55.us-west-2.compute.internal30"[1] Transition from New to WaitActivehostname""":,,""v":"0,,""timeip-10-150-1-55.us-west-2.compute.internal"pid"name:"",":pid"4759:":,"2023-09-22T23:14:58.578879784Z"crucible",,""hostname"level:"4759":"30,upstairs""ip-10-150-1-55.us-west-2.compute.internal:"1,upstairs":}1
1262 },
1263 "{time":"2023-09-22T23:14:58.578910648Z"{,""msg":hostname""":"msg"":ip-10-150-1-55.us-west-2.compute.internal""pid,"":Generation requested: 1 >= found:1"4759pid[1] downstairs client at 127.0.0.1:37306 has UUID 62157a5b-b69c-4edc-9cca-1fe4a003d85b",,"","upstairs:"4759:"1,"}v":v"0upstairs
1264 "::10{},""
1265 msg":,name""{"name":":"msg5c1ab357-2b46-4e37-877a-5089e53cee0e WaitActive WaitActive WaitActive"":",""crucible"vcrucible,"[1] client is_active_req TRUE, promote! session 5d828379-bf79-455f-9270-e82849f9ecba"test control::test::test_crucible_control_openapi ... ,"ok
1266 levellevel"":"",:"0v,"":name0:,"":name""3030crucible:"","crucible"level,"":level30":30,"time":",,""time2023-09-22T23:14:58.579015432Z"",:time""",time"":2023-09-22T23:14:58.579018891Z"hostname":test impacted_blocks::test::empty_impacted_blocks_never_conflict ... ":ip-10-150-1-55.us-west-2.compute.internal"ok
1267 "","2023-09-22T23:14:58.579018446Z,""2023-09-22T23:14:58.579016116Z,hostname"""hostname:"",:"pid"":ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidhostname"4759:""pid,:"4759:"ip-10-150-1-55.us-west-2.compute.internal"upstairs,",4759","upstairs"":upstairs1"}:
1268 1:pid"{}1
1269 ":msg{}4759"test impacted_blocks::test::empty_contains_nothing ... msg""::""
1270 ok
1271 [1] downstairs client at 127.0.0.1:47390 has UUID 03030c34-ede9-4da3-b23e-b6a582013eba[2] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) WaitActive WaitActive New ds_transition to WaitActive"",,""vv""::00,,""{namename""::""cruciblecrucible"",,""levellevel""::3030"msg":"Next flush: 1","v":0,"name":"crucible",",,""level"timetime""::"":302023-09-22T23:14:58.579132776Z2023-09-22T23:14:58.579132728Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759,,""upstairsupstairs""::11}}
1272 
1273 {{"msg":""msg":,""time":"[2] Transition from New to WaitActive",2023-09-22T23:14:58.579149971Z""v":,0",[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 03030c34-ede9-4da3-b23e-b6a582013eba, encrypted: false, database_read_version: 1, database_write_version: 1 }""name",:""v"crucible:"0,,""levelname"hostname":":":30"crucible"ip-10-150-1-55.us-west-2.compute.internal",","level"pid"::304759,",upstairs"":time":1"}
1274 ,2023-09-22T23:14:58.57918566Z""time",:""hostname":"2023-09-22T23:14:58.579192756Z"{,"ip-10-150-1-55.us-west-2.compute.internal"hostname,"":"pid":4759",ip-10-150-1-55.us-west-2.compute.internal""msgupstairs,"":pid1""}:
1275 4759:","{All extents match"upstairs"",:msg1"}:
1276 ""v":{0,""msg[2] client is_active_req TRUE, promote! session 5d828379-bf79-455f-9270-e82849f9ecba"":,"name"":v5c1ab357-2b46-4e37-877a-5089e53cee0e WaitActive WaitActive WaitActive"":,0",v"":name0",:""name"crucible:""",crucible""level,"":level30":crucible"30,"level":30,"time,"":"time":"2023-09-22T23:14:58.579248615Z"2023-09-22T23:14:58.579250601Z,"","hostname":"hostname":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal""pid,"":pid4759":4759,","upstairs":upstairs1","}:
1277 1time"}{
1278 ":"msg{":"2023-09-22T23:14:58.579253032Z""msg":,""hostname":"negotiate packet PromoteToActive { upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1 }"ip-10-150-1-55.us-west-2.compute.internal"[2] downstairs client at 127.0.0.1:35000 has UUID 27b6e795-c830-4cf3-a348-4b6152c16b91,"",v""v"::00,,""name","name:"":pid""crucible"crucible":,,""4759levellevel""::3030,"upstairs":1}
1279 ,,""timetime"":":"{2023-09-22T23:14:58.579302586Z"2023-09-22T23:14:58.579302622Z",","hostname":hostname""":msg"ip-10-150-1-55.us-west-2.compute.internal"":","ip-10-150-1-55.us-west-2.compute.internal"pid",No downstairs repair required":"4759pid,,""":upstairs4759"v",:"1:}downstairs
1280 "0:1{,"}name"
1281 msg""::{""crucible""msg":,""level":30[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 27b6e795-c830-4cf3-a348-4b6152c16b91, encrypted: false, database_read_version: 1, database_write_version: 1 }"negotiate packet PromoteToActive { upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1 }",",v""v:":00,,""namename""::""cruciblecrucible"",,""level"level:":3030,"time":"2023-09-22T23:14:58.579356682Z","hostname":",,""timetime""::""ip-10-150-1-55.us-west-2.compute.internal","pid2023-09-22T23:14:58.579367984Z2023-09-22T23:14:58.579368016Z""":,,"4759"hostname":hostname"",":ip-10-150-1-55.us-west-2.compute.internal""upstairs",ip-10-150-1-55.us-west-2.compute.internal""pid,"":pid":47594759:,,""1upstairsdownstairs""::12}}}
1282 
1283 {
1284 "{msg":""msg":"{5c1ab357-2b46-4e37-877a-5089e53cee0e WaitActive WaitActive WaitActive","v":"0msg,negotiate packet PromoteToActive { upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1 }""name",:""v":crucible0",,""name"level:"":"30crucible":,""level":30No initial repair work was required","v":0,"name":"crucible","level,":"30time,"":"time":"2023-09-22T23:14:58.579431947Z"2023-09-22T23:14:58.57943562Z,"","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal""pid,"":pid4759":4759,","upstairs":downstairs1":}3
1285 }
1286 {,"time""{:""msg"msg:"":"2023-09-22T23:14:58.579444043Z","negotiate packet ExtentVersionsPleasenegotiate packet RegionInfoPlease"",",v"":v0hostname":,"":name0",:""name"crucible:"",""crucible"level,"":30level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4759,"upstairs":1}
1287 The guest has finished waiting for activation
1288 {,,""timetime""::"""msg":"2023-09-22T23:14:58.579478924Z2023-09-22T23:14:58.579480369Z"",,"Set Downstairs and Upstairs active"hostname"":hostname"",":ip-10-150-1-55.us-west-2.compute.internal"",v""ip-10-150-1-55.us-west-2.compute.internal"pid,"":pid4759":,4759:",0downstairs"",downstairs:"1:"}1
1289 }name":"{
1290 "cruciblemsg{":""",msg"":negotiate packet ExtentVersionsPlease""level,"negotiate packet RegionInfoPlease""v",:"0v,"":name0",:""name"crucible:"","crucible"level,"":level30"::3030,,"","time"time:"":"upstairs"2023-09-22T23:14:58.579546302Z"2023-09-22T23:14:58.579548053Z",:","hostname"hostname"::""1,"time"ip-10-150-1-55.us-west-2.compute.internal}:"ip-10-150-1-55.us-west-2.compute.internal,"",pid""pid:"4759:
1291 ,4759""downstairs,"":2downstairs":}2
1292 }{2023-09-22T23:14:58.579548886Z"
1293 {,"""{msghostname":"""msg:":""msg"ip-10-150-1-55.us-west-2.compute.internal"negotiate packet RegionInfoPleasenegotiate packet ExtentVersionsPlease"",,,""vv""::00,,":pid":"""name"name"::""4759cruciblecrucible"",,"",levellevel""::3030"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 62157a5b-b69c-4edc-9cca-1fe4a003d85b, encrypted: false, database_read_version: 1, database_write_version: 1 }"upstairs":1,"}v
1294 ",,""timetime""::""{:2023-09-22T23:14:58.579620624Z2023-09-22T23:14:58.579620936Z"",0"",msg"hostname"hostname:"":,""name":ip-10-150-1-55.us-west-2.compute.internal"":""ip-10-150-1-55.us-west-2.compute.internal,"",pid""pid:"4759:28a12560-c07c-41d3-8d35-7e06b0c337aa is now active with session: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a4759"crucible",,"",downstairsdownstairs,""::33}}
1295 
1296 {""{"v""msglevel""msg:"":"::300,"[0] downstairs client at 127.0.0.1:39040 has UUID b0da680b-dcef-4405-a0bc-9a419607b615"name",:"[0] 5c1ab357-2b46-4e37-877a-5089e53cee0e (fcec4285-eaa1-46a4-819c-725bdeb5d523) WaitActive WaitActive WaitActive ds_transition to WaitQuorumv"",:"0v,"""name:"0:,""cruciblename""crucible",:","levelcrucible"":,30""level"level"::3030,"time":"2023-09-22T23:14:58.579684613Z",","hostname":time":",""time2023-09-22T23:14:58.579702252Z"":","hostname2023-09-22T23:14:58.579706536Z"":ip-10-150-1-55.us-west-2.compute.internal,"""hostname",ip-10-150-1-55.us-west-2.compute.internal"":,"",pid"ip-10-150-1-55.us-west-2.compute.internalpid"",:"4759pid","":upstairs:"4759time"::,1"}"
1297 upstairs2023-09-22T23:14:58.579706448Z""{:,"1"4759}msg
1298 ","hostname"upstairs{:"""msg::":"1"}
1299 ip-10-150-1-55.us-west-2.compute.internal"[0] Transition from WaitActive to WaitQuorum[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: b0da680b-dcef-4405-a0bc-9a419607b615, encrypted: false, database_read_version: 1, database_write_version: 1 }"",,""vv""::00,,""name"name:""{:crucible"",",crucible""level,"":level30":pid30"":msg4759":","upstairs":1}04482c5c-f564-49f3-897c-51c4de58239a WaitActive WaitActive WaitActive"
1300 ,,,""time"time:"":""2023-09-22T23:14:58.579795946Z{v""2023-09-22T23:14:58.579794215Z",,"":"0hostnamehostname""::"","msg"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759::4759,","name"upstairs"":upstairs28a12560-c07c-41d3-8d35-7e06b0c337aa Set Active after no repair""1:}1
1301 }:,{
1302 ""msg{v"":""msg:"":0","crucible[0] new RM replaced this: None85682a66-27fd-403c-bf22-d04b4ff66805 WaitActive WaitActive WaitActive""",,",name":"""vv""::00,,""namename""::level""":crucible"cruciblecrucible"",,""levellevel""::3040,"30level":30,,""timetime""::""2023-09-22T23:14:58.579888065Z2023-09-22T23:14:58.579887789Z"",,""hostnamehostname""::"","ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",",pid""pid"::47594759time",:""time":",,"2023-09-22T23:14:58.579889617Z"2023-09-22T23:14:58.579891213Z",upstairs"":upstairs1"}
1303 :,""{hostname""1msg}"
1304 :"hostname":{[0] Starts reconcile loop"""msg,"":v"":ip-10-150-1-55.us-west-2.compute.internal":,"0[1] downstairs client at 127.0.0.1:62630 has UUID 135d3cb9-fa87-4418-b94e-747b9c756857",,"""vname""::pid"0",crucible"":,name""level:"":ip-10-150-1-55.us-west-2.compute.internalcrucible30"4759,"",,"level"upstairs"":pid,:"30"1:time"4759}:
1305 ,"",time"2023-09-22T23:14:58.579963189Z:"",{"2023-09-22T23:14:58.579974259Z""upstairs",":msg1"":"hostnamehostname""::""Notify all downstairs, region set compare is done."}ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""
1306 pidpid""::47594759,,,""upstairs"upstairs:"{1"}:
1307 1v""{msg"}"
1308 msg":"::{"0","msg"name"::""crucible"[2] downstairs client at 127.0.0.1:55683 has UUID b50373f0-d859-4c68-a8df-7a98765c5610,""[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 135d3cb9-fa87-4418-b94e-747b9c756857, encrypted: false, database_read_version: 1, database_write_version: 1 }"level,[1] 5c1ab357-2b46-4e37-877a-5089e53cee0e (fcec4285-eaa1-46a4-819c-725bdeb5d523) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum"",v,"":v30"""v:"0:,0",name"":name:"0:"",cruciblecrucible"",,"""levellevel""::3030name":"crucible","level":30,The guest has finished waiting for activation
1309 ",,""time"time:"":"time":"2023-09-22T23:14:58.580066485Z"2023-09-22T23:14:58.580066157Z",",2023-09-22T23:14:58.580052974Z"hostname"":"hostname",",":ip-10-150-1-55.us-west-2.compute.internal""time"hostname":"ip-10-150-1-55.us-west-2.compute.internal,"",pid"":"ip-10-150-1-55.us-west-2.compute.internal"pid:"4759:2023-09-22T23:14:58.580070567Z"4759,,",,"""hostname":"upstairsupstairs""::pid11}}
1310 
1311 ip-10-150-1-55.us-west-2.compute.internal"{{,"":"4759pid"msg""msg:"":":,"4759upstairs85682a66-27fd-403c-bf22-d04b4ff66805 WaitActive WaitActive WaitActive[1] Transition from WaitActive to WaitQuorum""",,""vv""::00,,"","namename""::"":upstairs":cruciblecrucible"",,""11levellevel"}:
1312 }30"
1313 {:30","msg"time"::""Set check for repair"2023-09-22T23:14:58.580163166Z,"",time,"":""hostname"2023-09-22T23:14:58.580169102Z:"",v""ip-10-150-1-55.us-west-2.compute.internal"hostname,"":pid""::4759ip-10-150-1-55.us-west-2.compute.internal",,""0upstairspid,"""::14759}name
1314 ,"":"{upstairs"crucible{"":,1msg"}"
1315 :""level"[1] new RM replaced this: None{":",msg"":"v"msg"30::[2] downstairs client at 127.0.0.1:41929 has UUID 533946c5-94b5-4106-8c49-f677757735e20",,""namev""::"0",crucible""name,"":"level":crucible"40,"level":30[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: b50373f0-d859-4c68-a8df-7a98765c5610, encrypted: false, database_read_version: 1, database_write_version: 1 }",",time":"",,""timetime2023-09-22T23:14:58.580219706Z""":",:2023-09-22T23:14:58.58023078Z""",2023-09-22T23:14:58.580229367Z""hostname",:""hostnamehostnamev"":":ip-10-150-1-55.us-west-2.compute.internal""0:,""ip-10-150-1-55.us-west-2.compute.internal"pid",:ip-10-150-1-55.us-west-2.compute.internal4759",",,""pid"upstairs:"":pid"14759}:name,
1316 ""upstairs:4759"{:",""1msg}"
1317 :upstairs"{":crucible"1",}"msg"
1318 level":[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 533946c5-94b5-4106-8c49-f677757735e2, encrypted: false, database_read_version: 1, database_write_version: 1 }"":,[1] Starts reconcile loop""v,""v:"0{30,:"0name,"":"name":crucible""",cruciblemsg"""level,"":level30":":30[1] 127.0.0.1:46704 task reports connection:true","v":0,"name":"crucible",","time"level,,""time"time:"":""2023-09-22T23:14:58.580328228Z":,2023-09-22T23:14:58.580329805Z""hostname,"":30hostname"":":ip-10-150-1-55.us-west-2.compute.internal"","2023-09-22T23:14:58.580319951Z"pidip-10-150-1-55.us-west-2.compute.internal"":,4759",,pid"""upstairs:"4759:hostname1,}"":upstairs
1319 ":"1{},
1320 "ip-10-150-1-55.us-west-2.compute.internal"{msg"time":""":msg2023-09-22T23:14:58.580351387Z","","":pid"hostname":":"4759ip-10-150-1-55.us-west-2.compute.internal"85682a66-27fd-403c-bf22-d04b4ff66805 WaitActive WaitActive WaitActive",,""pid,"":[2] 5c1ab357-2b46-4e37-877a-5089e53cee0e (fcec4285-eaa1-46a4-819c-725bdeb5d523) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorumv"",:4759upstairs""0v,"":name0",:",:name""crucible:""1"crucible,"",level":}30"upstairs"
1321 level"::130,"{}time":
1322 "","2023-09-22T23:14:58.580416985Z"time,"":hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.580425677Z"",,""{hostnamepid""::"475904482c5c-f564-49f3-897c-51c4de58239a WaitActive WaitActive WaitActive,""ip-10-150-1-55.us-west-2.compute.internal""upstairs,","msg":pid""::14759}v",
1323 ":"0{upstairs":"1msg"}:
1324 ",28a12560-c07c-41d3-8d35-7e06b0c337aa Active Active Active{negotiate packet ExtentVersionsPlease"""",msg""v:"":,0"name",[2] Transition from WaitActive to WaitQuorum""name",:""v"crucible":,0v",""level:name""::30"0:crucible",",""name"crucible"level,":",":time30"crucible"",":level"":level":3030,2023-09-22T23:14:58.580495299Z""time",:""hostname":"2023-09-22T23:14:58.580508202Z","hostnameip-10-150-1-55.us-west-2.compute.internal"":","pid":4759ip-10-150-1-55.us-west-2.compute.internal,"","downstairspid""::14759}
1325 ,"upstairs":1{}
1326 ",msg{":",""time""negotiate packet ExtentVersionsPleasemsg"",:""v":"time"[2] new RM replaced this: None::"2023-09-22T23:14:58.580517127Z0",,""namev""::"2023-09-22T23:14:58.580517973Z"0crucible",",,"",hostname""name"":level:"":crucible30"","hostnamelevel"ip-10-150-1-55.us-west-2.compute.internal":,40":""pid":,"4759time":ip-10-150-1-55.us-west-2.compute.internal"",,"2023-09-22T23:14:58.580569449Z""time",:"","hostname2023-09-22T23:14:58.580575631Z"":,""pid"hostnameip-10-150-1-55.us-west-2.compute.internal"",::upstairs"4759:"","1pidip-10-150-1-55.us-west-2.compute.internal"":,4759"upstairs"}:pid,"":
1327 downstairs4759":12,"}{
1328 upstairs"{"msg:"1"}msg:}""
1329 :"negotiate packet ExtentVersionsPlease"
1330 thread 'impacted_blocks::test::extent_from_offset_panics_when_num_blocks_outside_region' panicked at 'assertion failed: offset.value as u128 + num_blocks.value as u128 <=\n ddef.extent_count() as u128 * extent_size as u128', upstairs/src/impacted_blocks.rs:284:5
1331 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
1332 ,negotiate packet ExtentVersionsPlease{""",msg""v:"":v{"[2] Starts reconcile loop0",:""0,msg""namev"":::,"""0crucible,""Set check for repair,name""level:"":"name":crucible30",",v""":level":crucible30"0,,""time"name"::"",,"2023-09-22T23:14:58.580667818Ztime"",:""crucible"hostname,"2023-09-22T23:14:58.580675655Z:""""level"level"ip-10-150-1-55.us-west-2.compute.internal,"","hostnamepid"":"::4759ip-10-150-1-55.us-west-2.compute.internal,""30downstairs,""::3pid"}:
1333 475930,"{upstairs":1}
1334 ","{msg"time:""msg":"":"2023-09-22T23:14:58.580705528Z"[0] 127.0.0.1:58766 task reports connection:true",[0] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) WaitActive WaitActive WaitActive ds_transition to WaitQuorum,"""v,"":v0",:"0name,"":name"":crucible"",,crucible""level",":level30":"hostnametime"":30":"2023-09-22T23:14:58.580717087Z"ip-10-150-1-55.us-west-2.compute.internal",,,""pid":,""hostname4759":"timetime""ip-10-150-1-55.us-west-2.compute.internal::"",2023-09-22T23:14:58.580748696Z"",,2023-09-22T23:14:58.580754128Z""",hostname""":hostnamepid"""::"ip-10-150-1-55.us-west-2.compute.internaldownstairs4759""ip-10-150-1-55.us-west-2.compute.internal,"",pid""pid:"4759:,:",4759"upstairs1"upstairs,""}upstairs::
1335 11":}1}}
1336 
1337 {{
1338 "{msg""":msg""msg"{:":5c1ab357-2b46-4e37-877a-5089e53cee0e WaitQuorum WaitQuorum WaitQuorum"","v"[0] Transition from WaitActive to WaitQuorum:"0,,""v"name""::0",msg"negotiate packet ExtentVersionsPlease"crucible""name":,"":levelcrucible"":,30",level""":v"30:[2] 127.0.0.1:61141 task reports connection:true0","name",:,"""crucible"time,"":time"":",v""level"::02023-09-22T23:14:58.580845666Z2023-09-22T23:14:58.580851229Z"",,30,"""name":"hostnamehostnamecrucible"""::"","level":ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:304759,",,upstairs""":upstairs"1:}1
1339 }time"
1340 {:""{msg":""2023-09-22T23:14:58.580873878Z"msg":[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],""",time"[0] new RM replaced this: None""v,"":,:""0v,"":2023-09-22T23:14:58.580887613Z"0name,hostname,"""":hostname""::name""":crucible""crucibleip-10-150-1-55.us-west-2.compute.internal",,""ip-10-150-1-55.us-west-2.compute.internal"",,levellevel""::4030""pid"pid"::47594759,","downstairs":upstairs":21}
1341 ,,""time"time:{}"":
1342 "2023-09-22T23:14:58.580939642Z""msg,2023-09-22T23:14:58.580939764Z"{hostname"",:"""hostnamemsg""ip-10-150-1-55.us-west-2.compute.internal:"",:"""ip-10-150-1-55.us-west-2.compute.internalpid""28a12560-c07c-41d3-8d35-7e06b0c337aa Active Active Active":,:"4759pid",,""v"":upstairs4759:,"":upstairs1"negotiate packet ExtentVersionsPlease}:
1343 10",{},""name""
1344 msgv:"{":""crucible"msg[0] Starts reconcile loop",:"",""level"::[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]""v":,0",v30"":name0":"0crucible,"","name"level:"":,crucible""30,"name"level"::30","crucible"time":",",level"2023-09-22T23:14:58.581025472Z":",,30"timetime""::"""hostname":2023-09-22T23:14:58.58104009Z2023-09-22T23:14:58.581035636Z""",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,""pid,",:"4759",pidtime":"":4759:"4759upstairs",:2023-09-22T23:14:58.581050948Z",",upstairs":11"}"
1345 upstairs"}hostname"
1346 {::""1msg"}:
1347 "{ip-10-150-1-55.us-west-2.compute.internal{",""pid"":msg":"[1] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorummsg"":,""4759v"Set check for repair",[0]R dirty: [false, false, false, false, false, false, false, false, false, false]:"0,,""v"name:"0:,""cruciblename"",:,""crucible"level,""""level:"30:v":downstairs":0303,"}name"
1348 :",,""crucible"{timetime""::",""2023-09-22T23:14:58.581131411Z""level"2023-09-22T23:14:58.581126952Z,"","hostname"hostname:"":":msg30"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"::4759pid",:"4759",upstairs"":upstairs1":}1
1349 }
1350 {{","msg":"msg":""[1] Transition from WaitActive to WaitQuorum"time,[1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]""v,"":v0",:"0name,"":name"":"crucible"":",crucible"",level""level:"[0] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) WaitActive WaitActive WaitActive ds_transition to WaitQuorum":3030,2023-09-22T23:14:58.581158345Z""v,"":hostname"0:","name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",",,,""timetime"pid"::"":""47592023-09-22T23:14:58.58119489Z"2023-09-22T23:14:58.581194625Z,"",,hostname"""level"upstairs"hostname:""::"30ip-10-150-1-55.us-west-2.compute.internal":ip-10-150-1-55.us-west-2.compute.internal,"",pid""1pid:"4759:},4759"
1351 ,upstairs,"":"upstairs1"}:
1352 1{}{
1353 ""timemsg":""{msg""[0] received reconcile message"msg:"":,:""v""[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"2023-09-22T23:14:58.581226272Z:,[1] new RM replaced this: None"""v",:"0v,"0":,0,hostname":",""name"name:"":crucible""",crucible""level,""level:"30:name"ip-10-150-1-55.us-west-2.compute.internal":",40"crucible"pid":,"4759level",,:""30time,"":time"upstairs""2023-09-22T23:14:58.581295416Z:"":,12023-09-22T23:14:58.581300187Z""}hostname,"":"hostname"
1354 :ip-10-150-1-55.us-west-2.compute.internal"",","{pidip-10-150-1-55.us-west-2.compute.internal"":,4759""time"pid,"":upstairs4759":,1:"}msg""
1355 upstairs"::{1"2023-09-22T23:14:58.581309444Z""}
1356 msg"[0] Transition from WaitActive to WaitQuorum{,:""msg"":"","[1]R dirty: [false, false, false, false, false, false, false, false, false, false][1] Starts reconcile loop""hostnamev,,""vv""::00,,""namename""::"""":":cruciblecrucible"",,""0ip-10-150-1-55.us-west-2.compute.internal"levellevel""::3030,","name"pid"::"4759crucible",,""level":upstairs"30:,,""time"time:"":"12023-09-22T23:14:58.581385156Z}2023-09-22T23:14:58.581384879Z"",,""
1357 hostnamehostname""::"","time{ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:",4759""msg":"upstairs,"":upstairs1"[0] All repairs completed, exit"}:
1358 1,"{}v":":"
1359 msg":02023-09-22T23:14:58.581395829Z"{,""[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"name"msg"",:""v"::,""0hostname"[2] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorumcrucible":",,""name"v:,""":level"0crucible,"",name""::level""crucible:"30,"30level"":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,",upstairs"":time1":,"}"2023-09-22T23:14:58.58147269Z"time,"",
1360 hostname:"":""{time":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.581476461Z""",msg":",pid"":hostname4759"",:"2023-09-22T23:14:58.581474349Z"upstairs":"[0] new RM replaced this: None","ip-10-150-1-55.us-west-2.compute.internal1"},
1361 ",hostname{pid":"""v"msg4759::"0",:""upstairs"ip-10-150-1-55.us-west-2.compute.internal":,"1[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}"
1362 ,,"{pid""vmsg""::0",:""name[2] Transition from WaitActive to WaitQuorum"":,""name"vcrucible"":,0",level":4759:"30name":,""upstairs"crucible":"1crucible",",},time""level:"":
1363 302023-09-22T23:14:58.581561682Z"","level":hostname"40{:"ip-10-150-1-55.us-west-2.compute.internal",,""time"pid:"":"47592023-09-22T23:14:58.58158032Z",,""msghostnameupstairs""::"1"}:,ip-10-150-1-55.us-west-2.compute.internal"""
1364 ,"[0] Starts cmd_loop"pid{":"4759msg",:""upstairs"time"::"[2]R dirty: [false, false, false, false, false, false, false, false, false, false]1"},,
1365 ""2023-09-22T23:14:58.581585426Z"{v,"":msg0",:""name":"[2] new RM replaced this: None"v"crucible"",,""vlevelhostname:"0:"""::300ip-10-150-1-55.us-west-2.compute.internal,""name",,"":name,"":pid"":time"":crucible""crucible"4759,2023-09-22T23:14:58.581642092Z,"",level"",:hostname40"":""upstairslevel""::301ip-10-150-1-55.us-west-2.compute.internal,"",time""pid"::"4759}
1366 2023-09-22T23:14:58.581664328Z,"","upstairs":hostname1":"}
1367 ip-10-150-1-55.us-west-2.compute.internal"{,{",pid""msg:"4759:","""msgtimeMax found gen is 1upstairs""",:"1v}"
1368 ::"{0",""msgname""::"":"[2] Starts reconcile loopcrucible"",,""vlevel""::030,2023-09-22T23:14:58.58167139Z"[0] Starts reconcile loop","",name"hostname":,""crucibletime"",:""level"::302023-09-22T23:14:58.581717911Z"",""vip-10-150-1-55.us-west-2.compute.internalhostname":"":,","0ip-10-150-1-55.us-west-2.compute.internal""time,"":pid"","pid"name"2023-09-22T23:14:58.581733145Z:"4759::"4759,,"",crucible""upstairshostname""::"1upstairs}"
1369 ip-10-150-1-55.us-west-2.compute.internal":{,,1""msgpid""::""}level
1370 Generation requested: 1 >= found:14759"",,""{vupstairs""::0:"1,}"
1371 30namemsg"{":"":"msg":crucible""[1] received reconcile message","[0] 127.0.0.1:39040 task reports connection:true,""level,":"30v"v"::00,,""namename"":","crucible"time,"":",level:"":crucible"2023-09-22T23:14:58.581813038Z30"",,"time"level"hostname""::,:""time":"ip-10-150-1-55.us-west-2.compute.internal""30,2023-09-22T23:14:58.58183153Z""pid",:"47592023-09-22T23:14:58.581802852Z",hostname"",upstairs:"":"1hostname":ip-10-150-1-55.us-west-2.compute.internal}"
1372 ,""pid{",:"4759",msgip-10-150-1-55.us-west-2.compute.internal"":time"upstairs"":""Next flush: 1:,"2023-09-22T23:14:58.58184668Z"pid"1"},:"
1373 ,hostname"4759:""{v":"0msg,"":,"ip-10-150-1-55.us-west-2.compute.internal"name",upstairs""85682a66-27fd-403c-bf22-d04b4ff66805 WaitQuorum WaitQuorum WaitQuorum:"":",crucible""pid,v""1:level0",:"30name"":}:"4759
1374 ,,crucible""time",:""level":{"302023-09-22T23:14:58.581908678Z"upstairs","":msg"1:"hostname":,""}time
1375 [1] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum""ip-10-150-1-55.us-west-2.compute.internal:"",{"2023-09-22T23:14:58.581924444Zpid"",:"4759","hostname"upstairs:"":msg,":ip-10-150-1-55.us-west-2.compute.internal1"},"
1376 pid""{":v"4759[1] All repairs completed, exit"",:msg",upstairs"":"1:0"}vAll extents match
1377 "",{","vmsg""::0",":name"[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]""name:,""0","cruciblename":""v:"":crucible0",,""namecrucible"level""::"30,crucible",","level"level":"30:,30"leveltime":"":302023-09-22T23:14:58.582005521Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""timepid""::"4759,"2023-09-22T23:14:58.58201516Z"upstairs":,"1}hostname
1378 ",:{"""time"msg":"ip-10-150-1-55.us-west-2.compute.internal:"",2023-09-22T23:14:58.582013962Z"",pidNo downstairs repair required"":"4759,,,time":""""hostname"2023-09-22T23:14:58.582019277Z":vupstairs""":,1:}0
1379 ,""{name"hostname:""msg"crucible:"",""level"[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]:"30,ip-10-150-1-55.us-west-2.compute.internal"":v":",0",ip-10-150-1-55.us-west-2.compute.internal"pid",","":pid"4759timename",":"upstairs":"":crucible"2023-09-22T23:14:58.582070654Z,""4759,level""hostname:"30::,""1upstairs":}1ip-10-150-1-55.us-west-2.compute.internal"},",pid""
1380 
1381 time:":4759"{,{2023-09-22T23:14:58.582099445Z""upstairs"",:"1"}hostname
1382 "msg":{"msg""ip-10-150-1-55.us-west-2.compute.internal"msg,"":pid""::":No initial repair work was required4759","","upstairsv""::01[1] Starts cmd_loop},
1383 ""[1] Transition from WaitActive to WaitQuorum",{name"":msg"",crucible:"","""level[0]R dirty: [false, false, false, false, false, false, false, false, false, false]"":v"30,v:"":v0":,"0name",,"":"nametime"crucible:"":"crucible"",,2023-09-22T23:14:58.582166236Z""level",:"30"hostname"level"::"30ip-10-150-1-55.us-west-2.compute.internal"0,","pid,"time:"4759:",""2023-09-22T23:14:58.582184198Z"upstairs,"":name"hostname1"}:
1384 ",":"time"{ip-10-150-1-55.us-west-2.compute.internal":crucible"",msg""pid:"",":2023-09-22T23:14:58.582188982Z"",Set Downstairs and Upstairs active4759"level",,"""hostname:"30:"upstairsv""::10},
1385 "ip-10-150-1-55.us-west-2.compute.internal"{name",""pid"msg:""::"crucible"4759,"[1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]level"",,",":v30":"0timeupstairs":",1:""},name
1386 2023-09-22T23:14:58.582227423Z""":"time,crucible{""":,""hostname2023-09-22T23:14:58.5822473Z""level,"msg""hostname:"30:":"":ip-10-150-1-55.us-west-2.compute.internal""ip-10-150-1-55.us-west-2.compute.internal",",pid,"""[2] received reconcile message"pid"time:"4759:",,:"2023-09-22T23:14:58.582275426Z""v",upstairs"4759:hostname",0",:"1:}"
1387 ip-10-150-1-55.us-west-2.compute.internal""{,name"upstairs":":""msgpid1""}::4759crucible",""
1388 thread '5c1ab357-2b46-4e37-877a-5089e53cee0e is now active with session: fcec4285-eaa1-46a4-819c-725bdeb5d523upstairs"",:"1v"}:
1389 impacted_blocks::test::extent_from_offset_panics_for_offsets_outside_region' panicked at 'assertion failed: offset.value < ddef.extent_count() as u64 * extent_size', upstairs/src/impacted_blocks.rs:280:5
1390 {,"msg":"[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":0,"name":"crucible","level":30{"level":30"msg":","[1] new RM replaced this: None"time":","v"2023-09-22T23:14:58.582419009Z":0,,""hostnamename"":":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level,"":,pid"40time"":":47592023-09-22T23:14:58.582432077Z","upstairs,"":hostname":"1}
1391 ip-10-150-1-55.us-west-2.compute.internal",,""time"{:pid"":4759"2023-09-22T23:14:58.582454817Z"msg":",","hostname":upstairs"":[1]R dirty: [false, false, false, false, false, false, false, false, false, false]"1ip-10-150-1-55.us-west-2.compute.internal,}"
1392 ,""v"pid"::4759{0,,""upstairs""name"msg"::1":"}crucible
1393 [2] All repairs completed, exit"",",v"":level"0{,:"30name":""crucible"msg":","level":[1] Starts reconcile loop"30,"v":0The guest has finished waiting for activation
1394 ,"name":"crucible",","time":level":"30,"time":"2023-09-22T23:14:58.582528024Z","2023-09-22T23:14:58.582534952Z"hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid":,4759"upstairs",:"1,time":""}
1395 2023-09-22T23:14:58.582550672Z"upstairs":,1"}hostname"
1396 {:"ip-10-150-1-55.us-west-2.compute.internal{""msg,""":pid"msg"::"4759"[2] Starts cmd_loop",","upstairs"[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"v",":v":01:,0},
1397 ""name"name":"{:crucible"""crucible"msg",,":"level"":level"30:30[2] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.582631547Z",",hostname":""time":",ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.582630959Z""",,time"""hostname":pid":":"47592023-09-22T23:14:58.582640266Z",,ip-10-150-1-55.us-west-2.compute.internal"",""pid"upstairs"hostname":::"47591},
1398 ip-10-150-1-55.us-west-2.compute.internal"","upstairs"pid"::14759}
1399 ,"upstairs":1}
1400 {{"msg":""msg":"[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":[2] Transition from WaitActive to WaitQuorum"0,,""name":"v"crucible":,0",level":"30name":"crucible","level":30,","time":"time":"2023-09-22T23:14:58.582722067Z"2023-09-22T23:14:58.582725499Z,"","hostname"hostname"::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",",pid":"4759pid":,"4759upstairs":1,"}upstairs"
1401 :1}
1402 {"msg":"{[2] new RM replaced this: None","v"":msg"0:","name":"crucible","[2]R dirty: [false, false, false, false, false, false, false, false, false, false]"level":,"40v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.582790952Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:14:58.582796296Z"4759,","hostname":"upstairs":1}ip-10-150-1-55.us-west-2.compute.internal"
1403 ,"pid":4759,{"upstairs":1"}msg
1404 ":"[2] Starts reconcile loop"{,"v":0,""namemsg":"":"Max found gen is 1"crucible"0,",",v"level":"0:name,30"":name"":"crucible"crucible",,""levellevel""::3030,"time":"2023-09-22T23:14:58.58285342Z","hostname",,""time"time:"":":"2023-09-22T23:14:58.582857488Z"2023-09-22T23:14:58.582857839Z",","hostname"hostname"::""ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""4759pidpid""::47594759,",,""upstairs":upstairsupstairs""::111}}
1405 
1406 {}
1407 {"msg":""msg"Generation requested: 1 >= found:1:"","{v":0,5c1ab357-2b46-4e37-877a-5089e53cee0e Set Active after no repair""name,"":v"""crucible:"0,,""msgnamelevel""::"30"crucible":,""level":30[0] 127.0.0.1:36921 task reports connection:true","v":0,"name":"crucible",,""time":",level":"2023-09-22T23:14:58.582925276Ztime"30",:""hostname":"2023-09-22T23:14:58.58292945Z","hostnameip-10-150-1-55.us-west-2.compute.internal"":,""pid":4759,ip-10-150-1-55.us-west-2.compute.internal"","upstairs"pid:":14759}
1408 ,"upstairs":1}{
1409 ",msg{":"""Next flush: 1msg"",:""time":"vNotify all downstairs, region set compare is done."":0,","v"name:"0:,""2023-09-22T23:14:58.582942951Znamecrucible"":,"""levelcrucible"":,30","level":hostname30":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":,"1,time"":"time":"}2023-09-22T23:14:58.582977236Z"2023-09-22T23:14:58.582980116Z"
1410 ,,""hostname"hostname:"":"{ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759",,msg""":"upstairsupstairs""::11}}
1411 
1412 {04482c5c-f564-49f3-897c-51c4de58239a WaitQuorum WaitQuorum WaitQuorum"{","msg"v"msg""::"":0All extents matchSet check for repair"",,""vv""::00,,""name"name:"":"crucible"crucible,"",level""level:"30:,"30name":"crucible","level":30,"time",:""time":"2023-09-22T23:14:58.583040954Z",2023-09-22T23:14:58.583042758Z"",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid",:"4759pid":,4759",upstairs"":1upstairs":}1
1413 },
1414 {"time""{msg":""msg"::No downstairs repair required""","v":[1] 127.0.0.1:47390 task reports connection:true0",,"2023-09-22T23:14:58.583046808Z"name"":v"":crucible",",0","levelname":"30:"hostname"crucible:"","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs",":time1":"},
1415 2023-09-22T23:14:58.583086112Z""time",:""hostname":"{2023-09-22T23:14:58.58309092Z",ip-10-150-1-55.us-west-2.compute.internal"","hostname"pid""::"4759msg":",ip-10-150-1-55.us-west-2.compute.internal"","upstairs"pid:"1:4759},"
1416 upstairs":[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]1{}
1417 ""msg{":",""v"msgNo initial repair work was required"":,"":v0"5c1ab357-2b46-4e37-877a-5089e53cee0e Active Active Active:"0,,""vname""::0",",namecrucible"":,"""cruciblelevel"",:"30level":name"30:"crucible","level":30,"time":","time":"2023-09-22T23:14:58.583146434Z","2023-09-22T23:14:58.583148681Zhostname"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,",pid"":upstairs4759":1,"},upstairs
1418 ":1}
1419 {{"msg":""msg":"Set Downstairs and Upstairs active"Set check for repair",","v"v:":00,,""namename""::""cruciblecrucible"",,""levellevel""::3030",,""time"timetime""::"":"2023-09-22T23:14:58.583182881Z2023-09-22T23:14:58.583182837Z""2023-09-22T23:14:58.583152385Z,,"""hostnamehostname""::"","hostname":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""ip-10-150-1-55.us-west-2.compute.internal"::47594759,,,"""upstairsupstairs""::11}}
1420 
1421 {pid":{"4759msg"":msg"",":"upstairs"[2] 127.0.0.1:35000 task reports connection:true":,"1v85682a66-27fd-403c-bf22-d04b4ff66805 is now active with session: 5d828379-bf79-455f-9270-e82849f9ecba""},:"0v,"":name0",:""name"crucible:""
1422 crucible,"","level"level:":3030{"msg":"[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":0,"name":",,""timetime""::""crucible",2023-09-22T23:14:58.583248165Z"2023-09-22T23:14:58.583248005Z,"""hostname,"":"hostname"level":ip-10-150-1-55.us-west-2.compute.internal:30"","pid":4759ip-10-150-1-55.us-west-2.compute.internal",,""pidupstairs""::47591},
1423 "upstairs":1{}
1424 "msg":"{"msg":"85682a66-27fd-403c-bf22-d04b4ff66805 Set Active after no repair",",v5c1ab357-2b46-4e37-877a-5089e53cee0e Active Active Active"":,0",v"":name0",:""name"crucible:"","crucible"level,"""level:"30:time":"302023-09-22T23:14:58.583272183Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,",time,"":time""":2023-09-22T23:14:58.58330024Z""upstairs,2023-09-22T23:14:58.583302498Z"","hostnamehostname"":":"":1ip-10-150-1-55.us-west-2.compute.internal"},ip-10-150-1-55.us-west-2.compute.internal""pid,"":
1425 pid4759":4759,","upstairs":upstairs"1:{}1
1426 }"
1427 {msg":"{"msg":""msg":"[0]R dirty: [false, false, false, false, false, false, false, false, false, false]Notify all downstairs, region set compare is done.Set check for repair""",,"",vv""::00,,""name""name:"":crucible""vcrucible,"","levellevel""::3030":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:58.583362221Z2023-09-22T23:14:58.58336178Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid"",::47594759",,""time"upstairsupstairs""::11:"}}
1428 
1429 {2023-09-22T23:14:58.583367692Z""{msg","":"msg"hostnameSet check for repair:"",":"[0] received reconcile messagev"",":v0",:"0name,"":name"":"cruciblecrucible"",,"""levellevel""::3030ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1430 ,,""timetime""::""{2023-09-22T23:14:58.583416969Z2023-09-22T23:14:58.583416863Z"",,""hostnamehostname"""msg::""":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""[1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"pidpid""::47594759,",,v":""0upstairsupstairs""::11,"}name"}
1431 
1432 {:"crucible{"""msg"msg:"":","level":[1] 127.0.0.1:62630 task reports connection:true[0] All repairs completed, exit"30,""v,"":v"0:,0","name":name"":"crucible"crucible,"","level"level:":3030,"time":",,""timetime""::""2023-09-22T23:14:58.583472441Z"2023-09-22T23:14:58.583478235Z2023-09-22T23:14:58.583478276Z"",,,"""hostnamehostname""::""hostname":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",ip-10-150-1-55.us-west-2.compute.internal"",pid""pid:"4759:,,4759""upstairs,pid""":upstairs1":}:
1433 14759}
1434 {,"{"upstairs"msgmsg""::""":[0] Starts cmd_loop1"85682a66-27fd-403c-bf22-d04b4ff66805 Active Active Active",,""vv""::00,,""name"name}"::""
1435 cruciblecrucible"",,""levellevel""::3030{"msg":"[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":0,"name":"crucible",,""time":"time":,2023-09-22T23:14:58.58354374Z""","2023-09-22T23:14:58.583543687Zhostname"",":"hostname"level":ip-10-150-1-55.us-west-2.compute.internal:"",30"pid"ip-10-150-1-55.us-west-2.compute.internal:"4759,"pid,"":4759upstairs":,"1upstairs}"
1436 :1}
1437 {{"msg":""msg":"[1] received reconcile message"Set check for repair,"",v"":v0",:"0name,"":name"":crucible"",,crucible""",level""level:"30:time":30"2023-09-22T23:14:58.583569324Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",pid",""time"time:"":":2023-09-22T23:14:58.583595952Z"2023-09-22T23:14:58.583598205Z",","hostname"hostname:"":4759"ip-10-150-1-55.us-west-2.compute.internal",",pid"ip-10-150-1-55.us-west-2.compute.internal:"4759,","upstairs":pid""1upstairs:":47591}},
1438 "
1439 {upstairs":1}"
1440 msg"{{:""msg":[1] All repairs completed, exit""","msg[2] 127.0.0.1:41929 task reports connection:truev"",:"0v",:"0name,"":":"name"":"crucible"crucible,"","level":level30":30[1]R dirty: [false, false, false, false, false, false, false, false, false, false]","v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:58.58365467Z2023-09-22T23:14:58.583655553Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759,,""upstairsupstairs""::11,}}
1441 
1442 {"time""{msg":""msg":[1] Starts cmd_loop:""","v":02023-09-22T23:14:58.583662509Z85682a66-27fd-403c-bf22-d04b4ff66805 Active Active Active,"","name""v:"","crucible:"0,,""level"name:"30hostname":":"crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":,"1time":}"
1443 2023-09-22T23:14:58.583702414Z,"",time"":"hostname":"{2023-09-22T23:14:58.583707413Z"ip-10-150-1-55.us-west-2.compute.internal,"","hostnamepid"":":"4759msg",:"ip-10-150-1-55.us-west-2.compute.internal"","upstairs"pid:":14759}[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"
1444 ,","upstairs{":v"1msg"}:
1445 "":0{[2] received reconcile message"","msg,"":v"":name"Set check for repair0",:",""namev""::"0crucible",crucible"",,name""level:"":"crucible30"level":,30"level":30,"time":"2023-09-22T23:14:58.583765612Z",","hostname"time:"":"2023-09-22T23:14:58.58376961Z"ip-10-150-1-55.us-west-2.compute.internal",,""pidhostname""::"4759,",ip-10-150-1-55.us-west-2.compute.internal"","upstairs"pid:"1:}4759
1446 ,"time"{upstairs":"":msg1":"}
1447 [2] All repairs completed, exit"2023-09-22T23:14:58.583768504Z",{"v"":msg0",:""name",:[0] received reconcile message"""crucible,"",v"hostname"level""::030,":"name":"crucible",ip-10-150-1-55.us-west-2.compute.internal"","level":pid":304759,"time,":""upstairs":2023-09-22T23:14:58.583816334Z"1,"}hostname",:""time":"
1448 ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.58382543Z"",","pid":hostname4759":","{upstairs"ip-10-150-1-55.us-west-2.compute.internal:"1,"}
1449 pid"":{4759msg":,""msg"upstairs:"":"[2] Starts cmd_loop1"}
1450 ,[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"{"v"":,0msg,"":name"":""crucible[0] All repairs completed, exit"",",v"level""v:"30::00,,""namename":":""cruciblecrucible,"",time""level:"":"302023-09-22T23:14:58.583872048Z",,""hostname"level"::"30ip-10-150-1-55.us-west-2.compute.internal,"","time"pid:"":4759,"2023-09-22T23:14:58.583886302Z"upstairs",:"1hostname}"
1451 :","time"ip-10-150-1-55.us-west-2.compute.internal":","pid":47592023-09-22T23:14:58.583892841Z",","upstairs":hostname"1:}
1452 "{ip-10-150-1-55.us-west-2.compute.internal",""pid":msg"4759:","[0] Starts cmd_loop"upstairs",:"1v":}0
1453 ,"name":"crucible","level":{30"msg":"[2]R dirty: [false, false, false, false, false, false, false, false, false, false]","v":0,"name":"crucible","level,""time":":302023-09-22T23:14:58.583942631Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1454 ,"time":"{2023-09-22T23:14:58.583958889Z",""hostnamemsg":"":"[1] received reconcile message","vip-10-150-1-55.us-west-2.compute.internal"",":pid":04759,"name",:""crucible"upstairs",:"1level":}30
1455 {"msg":"Max found gen is 1","v":0,"name":","crucible"time":","level":2023-09-22T23:14:58.583994618Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1456 ,"time{":"2023-09-22T23:14:58.584014977Z"",msg":""hostname":"[1] All repairs completed, exit","v":ip-10-150-1-55.us-west-2.compute.internal"0,,""name":"pid"crucible":,"4759level":,30"upstairs":1}
1457 {"msg":"Generation requested: 1 >= found:1",","v"time"::0","name":2023-09-22T23:14:58.58405039Z"",crucible"","hostname"level":":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1458 {,"time":""msg":"2023-09-22T23:14:58.584078111Z"[1] Starts cmd_loop",","hostname":"v":0,"ip-10-150-1-55.us-west-2.compute.internalname"":","crucible"pid":,4759"level",:"30upstairs":1}
1459 {"msg":"Next flush: 1","v":0,"name":","crucible"time",:""level":302023-09-22T23:14:58.58411135Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
1460 ,"time":"{2023-09-22T23:14:58.584132467Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal"[2] received reconcile message",","pidv":"0:,4759"name,":""crucible"upstairs",":level":130,"time":"2023-09-22T23:14:58.584169606Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal}"
1461 ,"pid":4759,"{upstairs":1}
1462 "msg":"All extents match","{v":0,"name":""msg":"crucible","[2] All repairs completed, exitlevel":"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.584220147Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","pid":time4759":","upstairs2023-09-22T23:14:58.584226423Z"":,"1hostname":"}
1463 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{,"upstairs":1"}msg
1464 ":"No downstairs repair required","{v":0,"name":""cruciblemsg":"",[2] Starts cmd_loop""level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.584280644Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""pid"time:":"4759,2023-09-22T23:14:58.584285709Z"",upstairs":"1hostname":"}
1465 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{,"upstairs":1"}msg
1466 ":"No initial repair work was required","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.584337281Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"upstairs":1}
14672023-09-22T23:14:58.584ZINFOcrucible: Set Downstairs and Upstairs active upstairs = 1
14682023-09-22T23:14:58.584ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a is now active with session: cc12d06e-bd83-45fe-987f-779d8c6339ec upstairs = 1
14692023-09-22T23:14:58.584ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a Set Active after no repair upstairs = 1
14702023-09-22T23:14:58.584ZINFOcrucible: Notify all downstairs, region set compare is done. upstairs = 1
14712023-09-22T23:14:58.584ZINFOcrucible: Set check for repair upstairs = 1
14722023-09-22T23:14:58.584ZINFOcrucible: [1] 127.0.0.1:37306 task reports connection:true upstairs = 1
14732023-09-22T23:14:58.584ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a Active Active Active upstairs = 1
14742023-09-22T23:14:58.584ZINFOcrucible: Set check for repair upstairs = 1
14752023-09-22T23:14:58.584ZINFOcrucible: [2] 127.0.0.1:55683 task reports connection:true upstairs = 1
14762023-09-22T23:14:58.584ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a Active Active Active upstairs = 1
14772023-09-22T23:14:58.584ZINFOcrucible: Set check for repair upstairs = 1
14782023-09-22T23:14:58.584ZINFOcrucible: [0] received reconcile message upstairs = 1
14792023-09-22T23:14:58.584ZINFOcrucible: [0] All repairs completed, exit upstairs = 1
14802023-09-22T23:14:58.584ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
14812023-09-22T23:14:58.584ZINFOcrucible: [1] received reconcile message upstairs = 1
14822023-09-22T23:14:58.584ZINFOcrucible: [1] All repairs completed, exit upstairs = 1
14832023-09-22T23:14:58.585ZINFOcrucible: [1] Starts cmd_loop upstairs = 1
14842023-09-22T23:14:58.585ZINFOcrucible: [2] received reconcile message upstairs = 1
14852023-09-22T23:14:58.585ZINFOcrucible: [2] All repairs completed, exit upstairs = 1
14862023-09-22T23:14:58.585ZINFOcrucible: [2] Starts cmd_loop upstairs = 1
14872023-09-22T23:14:58.585ZWARNcrucible: [0] downstairs disconnected upstairs = 1
14882023-09-22T23:14:58.585ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a Gone missing, transition from Active to Offline upstairs = 1
14892023-09-22T23:14:58.585ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a connection to 127.0.0.1:36921 closed looper = 0 upstairs = 1
14902023-09-22T23:14:58.585ZWARNcrucible: [0] pm_task rx.recv() is None upstairs = 1
14912023-09-22T23:14:58.585ZINFOcrucible: [0] 127.0.0.1:36921 task reports connection:false upstairs = 1
14922023-09-22T23:14:58.585ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a Offline Active Active upstairs = 1
14932023-09-22T23:14:58.585ZINFOcrucible: [0] 127.0.0.1:36921 task reports offline upstairs = 1
1494 test impacted_blocks::test::extent_from_offset_can_recreate_iblocks ... ok
1495 test impacted_blocks::test::iblocks_conflicts_is_commutative ... ok
1496 thread 'impacted_blocks::test::extent_from_offset_panics_when_num_blocks_outside_region' panicked at 'assertion failed: offset.value as u128 + num_blocks.value as u128 <=\n ddef.extent_count() as u128 * extent_size as u128', upstairs/src/impacted_blocks.rs:284:5
1497 test impacted_blocks::test::extent_from_offset_panics_for_offsets_outside_region ... ok
1498 test impacted_blocks::test::iblocks_extents_returns_correct_extents ... ok
1499 thread 'impacted_blocks::test::extent_from_offset_panics_when_num_blocks_outside_region' panicked at 'assertion failed: offset.value as u128 + num_blocks.value as u128 <=\n ddef.extent_count() as u128 * extent_size as u128', upstairs/src/impacted_blocks.rs:284:5
1500 test impacted_blocks::test::extent_from_offset_panics_when_num_blocks_outside_region ... ok
1501 test impacted_blocks::test::iblocks_from_offset_is_empty_for_zero_blocks ... ok
1502 thread 'impacted_blocks::test::iblocks_from_offset_with_zero_extent_size_panics' panicked at 'assertion failed: extent_size > 0', upstairs/src/impacted_blocks.rs:146:9
1503 test impacted_blocks::test::iblocks_from_offset_with_zero_extent_size_panics ... ok
1504 test impacted_blocks::test::intersection_is_associative ... ok
1505 test impacted_blocks::test::intersection_is_commutative ... ok
1506 test impacted_blocks::test::intersection_with_empty_is_empty ... ok
1507 test impacted_blocks::test::iblocks_new_panics_for_flipped_polarity ... ok
1508 test impacted_blocks::test::intersection_produces_less_than_or_equal_block_count ... ok
1509 test impacted_blocks::test::nothing_contains_empty ... ok
1510 test impacted_blocks::test::test_extent_from_offset ... ok
1511 test impacted_blocks::test::test_extent_from_offset_single_block_only ... ok
1512 test impacted_blocks::test::test_extent_to_impacted_blocks ... ok
1513 test impacted_blocks::test::test_impacted_blocks_from_offset ... ok
1514 test impacted_blocks::test::test_large_extent_to_impacted_blocks ... ok
1515 test impacted_blocks::test::test_new_range_panics_when_last_block_before_first - should panic ... ok
1516 test impacted_blocks::test::test_new_range_panics_when_last_extent_before_first - should panic ... ok
1517 test impacted_blocks::test::overlapping_impacted_blocks_should_conflict ... ok
1518 test impacted_blocks::test::union_is_associative ... ok
1519 test impacted_blocks::test::union_is_commutative ... ok
1520 test impacted_blocks::test::subregions_are_contained ... ok
15212023-09-22T23:14:58.670ZINFOcrucible: Crucible stats registered with UUID: 641ab82c-3ee3-4691-881a-11c5f52cf819
15222023-09-22T23:14:58.670ZINFOcrucible: Crucible 641ab82c-3ee3-4691-881a-11c5f52cf819 has session id: a2e2ade3-c458-46cf-a429-7df7c085dacd
15232023-09-22T23:14:58.671ZINFOcrucible: 641ab82c-3ee3-4691-881a-11c5f52cf819 is now active with session: 46312f17-e793-483e-9dca-95375fb17903
15242023-09-22T23:14:58.671ZINFOcrucible: [0] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) New New New ds_transition to WaitActive
15252023-09-22T23:14:58.671ZINFOcrucible: [0] Transition from New to WaitActive
15262023-09-22T23:14:58.671ZINFOcrucible: [0] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) WaitActive New New ds_transition to WaitQuorum
15272023-09-22T23:14:58.671ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
15282023-09-22T23:14:58.671ZINFOcrucible: [0] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) WaitQuorum New New ds_transition to Active
15292023-09-22T23:14:58.671ZINFOcrucible: [0] Transition from WaitQuorum to Active
15302023-09-22T23:14:58.671ZINFOcrucible: [1] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active New New ds_transition to WaitActive
15312023-09-22T23:14:58.671ZINFOcrucible: [1] Transition from New to WaitActive
15322023-09-22T23:14:58.671ZINFOcrucible: [1] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active WaitActive New ds_transition to WaitQuorum
15332023-09-22T23:14:58.671ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
15342023-09-22T23:14:58.671ZINFOcrucible: [1] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active WaitQuorum New ds_transition to Active
15352023-09-22T23:14:58.671ZINFOcrucible: [1] Transition from WaitQuorum to Active
15362023-09-22T23:14:58.671ZINFOcrucible: [2] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active Active New ds_transition to WaitActive
15372023-09-22T23:14:58.671ZINFOcrucible: [2] Transition from New to WaitActive
15382023-09-22T23:14:58.671ZINFOcrucible: [2] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active Active WaitActive ds_transition to WaitQuorum
15392023-09-22T23:14:58.671ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
15402023-09-22T23:14:58.671ZINFOcrucible: [2] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active Active WaitQuorum ds_transition to Active
15412023-09-22T23:14:58.671ZINFOcrucible: [2] Transition from WaitQuorum to Active
15422023-09-22T23:14:58.671ZINFOcrucible: [1] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active Active Active ds_transition to Faulted
15432023-09-22T23:14:58.671ZINFOcrucible: [1] Transition from Active to Faulted
15442023-09-22T23:14:58.671ZINFOcrucible: [1] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active Faulted Active ds_transition to LiveRepairReady
15452023-09-22T23:14:58.671ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
15462023-09-22T23:14:58.671ZINFOcrucible: [1] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active LiveRepairReady Active ds_transition to LiveRepair
15472023-09-22T23:14:58.672ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
15482023-09-22T23:14:58.672ZINFOcrucible: [0] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Active LiveRepair Active ds_transition to Faulted
15492023-09-22T23:14:58.672ZINFOcrucible: [0] Transition from Active to Faulted
15502023-09-22T23:14:58.672ZINFOcrucible: [0] 641ab82c-3ee3-4691-881a-11c5f52cf819 (46312f17-e793-483e-9dca-95375fb17903) Faulted LiveRepair Active ds_transition to LiveRepairReady
15512023-09-22T23:14:58.672ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
15522023-09-22T23:14:58.672ZINFOcrucible: Checking if live repair is needed
15532023-09-22T23:14:58.672ZWARNcrucible: Upstairs already in repair, trying again later
1554 test impacted_blocks::test::union_produces_greater_than_or_equal_block_count ... ok
1555 test live_repair::repair_test::test_check_for_repair_already_repair ... ok
15562023-09-22T23:14:58.673ZINFOcrucible: Crucible stats registered with UUID: 8228decb-4d72-4851-9eb0-d687894e47e9
15572023-09-22T23:14:58.673ZINFOcrucible: Crucible 8228decb-4d72-4851-9eb0-d687894e47e9 has session id: b5e0c294-607f-4c30-a2e2-eba071706399
15582023-09-22T23:14:58.673ZINFOcrucible: 8228decb-4d72-4851-9eb0-d687894e47e9 is now active with session: 4dbfcf6c-f885-44e1-a38c-eeeb7a138dad
15592023-09-22T23:14:58.673ZINFOcrucible: [0] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) New New New ds_transition to WaitActive
1560 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30{"msg":"Crucible stats registered with UUID: 120e0f16-155b-4b25-9c8c-e48b4995715e","v":0,"name":"crucible",,""time"level:"":302023-09-22T23:14:58.67319914Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1561 {"msg":"[0] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible,"","time"level:"":302023-09-22T23:14:58.673214482Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1562 ,"time":"2023-09-22T23:14:58.673240008Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1563 Crucible 120e0f16-155b-4b25-9c8c-e48b4995715e has session id: 320a8300-6268-4943-be65-a9ba6f11e562","v"{:0,""name"msg:"":"crucible","level":[0] Transition from WaitActive to WaitQuorum30","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:14:58.673272725Ztime"":","hostname":2023-09-22T23:14:58.6732774Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759ip-10-150-1-55.us-west-2.compute.internal","}pid
1564 ":4759}
1565 {{"msg":""msg":"120e0f16-155b-4b25-9c8c-e48b4995715e is now active with session: b41d05a7-cb34-4f00-b2d8-90477e36239b","v":[0] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) WaitQuorum New New ds_transition to Active0",","namev""::"0,crucible""name,"":"level":crucible30","level":30,",time"":time"":"2023-09-22T23:14:58.673312065Z"2023-09-22T23:14:58.673313586Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47594759}}
1566 
1567 {"{msg":""msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","[0] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) New New New ds_transition to WaitActive"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.673344262Z",","hostnametime""::""2023-09-22T23:14:58.673348338Z"ip-10-150-1-55.us-west-2.compute.internal",,""pidhostname""::"4759}
1568 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
1569 "msg":"{"msg":"[0] Transition from New to WaitActive",[1] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active New New ds_transition to WaitActive""v",:"0v",:"0,name"":name"":"crucible"crucible,"","level":level"30:30,,""timetime""::""2023-09-22T23:14:58.673380608Z2023-09-22T23:14:58.673380928Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
1570 
1571 {"{msg":""msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","[0] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) WaitActive New New ds_transition to WaitQuorumlevel"":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.673413571Z",",hostname"":"time":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.673417822Z"",","pid":hostname4759":"}
1572 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
1573 "msg":"{"msg":"[0] Transition from WaitActive to WaitQuorum","[1] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active WaitActive New ds_transition to WaitQuorumv"":,"0v",:"0name,"":"name":crucible"",crucible"",level"":level30":30,,""timetime""::""2023-09-22T23:14:58.67344989Z2023-09-22T23:14:58.673450321Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
1574 
1575 {"{msg":""msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","[0] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) WaitQuorum New New ds_transition to Active"level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.673482289Z",","hostname":time"":"2023-09-22T23:14:58.673486568Zip-10-150-1-55.us-west-2.compute.internal"",,""pid":hostname"4759:"}
1576 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
1577 "msg":"{"msg":"[0] Transition from WaitQuorum to Active","[1] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active WaitQuorum New ds_transition to Activev"":,0",v"":name0":,""namecrucible"":","cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:14:58.673519054Z2023-09-22T23:14:58.673518251Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
1578 
1579 {{"msg"":"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible[1] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active New New ds_transition to WaitActive"",","levelv""::300,"name":"crucible","level":30,"time":","time"2023-09-22T23:14:58.673557296Z:"","hostname"2023-09-22T23:14:58.673560704Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid}"
1580 :4759}
1581 {{"msg":""msg":"[1] Transition from New to WaitActive","v":0,"name"[2] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active Active New ds_transition to WaitActive:"","crucible"v",:"0,level"":name30":"crucible","level":30,"time":","time"2023-09-22T23:14:58.673592314Z:"","hostname"2023-09-22T23:14:58.673594847Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4759","}
1582 pid":4759}{
1583 "msg":"{"msg":"[2] Transition from New to WaitActive","v":[1] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active WaitActive New ds_transition to WaitQuorum0",","namev""::"0,crucible"",name"":"level":crucible"30,"level":30,",time"":time"":"2023-09-22T23:14:58.673626653Z"2023-09-22T23:14:58.673627981Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47594759}}
1584 
1585 {{"msg":""msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name[2] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active Active WaitActive ds_transition to WaitQuorum"":","cruciblev"":,"0,level"":name30":"crucible","level":30,"time":","time2023-09-22T23:14:58.673659898Z"":","hostname":2023-09-22T23:14:58.673662427Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759ip-10-150-1-55.us-west-2.compute.internal",}"
1586 pid":4759}{
1587 "msg":"{"msg":"[2] Transition from WaitActive to WaitQuorum","[1] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active WaitQuorum New ds_transition to Active"v":,"0v,"":0name",:""name"crucible:"","crucible"level",:"30level":30,,""timetime""::""2023-09-22T23:14:58.673694403Z2023-09-22T23:14:58.673695167Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:"4759:4759}
1588 }
1589 {{"msg":""msg":"[1] Transition from WaitQuorum to Active","v":0,"name[2] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active Active WaitQuorum ds_transition to Active"":","cruciblev"":,0","level"name:"30:"crucible","level":30,"time":","time":2023-09-22T23:14:58.673729033Z"","hostname2023-09-22T23:14:58.673731375Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid}"
1590 :4759}
1591 {{"msg":""msg":"[2] Transition from WaitQuorum to Active","v":0,"[2] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active Active New ds_transition to WaitActivename"":","vcrucible"":,0","level"name:":30"crucible","level":30,"time":","time":2023-09-22T23:14:58.673763416Z"","hostname2023-09-22T23:14:58.673765649Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"}pid
1592 ":4759}
1593 {{"msg":""msg":"[2] Transition from New to WaitActive","v":0,"name[1] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active Active Active ds_transition to Faulted"":","cruciblev"":,"0,level"":name30":"crucible","level":30,"time":","time":2023-09-22T23:14:58.673796954Z"","hostname2023-09-22T23:14:58.673799431Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4759","}
1594 pid":4759}{
1595 "msg":"{"msg":"[1] Transition from Active to Faulted","v":[2] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active Active WaitActive ds_transition to WaitQuorum0",","namev""::"0,crucible""name,"":"level":crucible"30,"level":30,,""timetime""::""2023-09-22T23:14:58.673831395Z2023-09-22T23:14:58.673832747Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
1596 
1597 {{"msg"":msg"":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"[1] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active Faulted Active ds_transition to LiveRepairReady"crucible",,""v"level:":030,"name":"crucible","level":30,"time":","2023-09-22T23:14:58.673869227Z"time":,""hostname":"2023-09-22T23:14:58.673872463Z","hostname"ip-10-150-1-55.us-west-2.compute.internal:"","pid":4759ip-10-150-1-55.us-west-2.compute.internal"},
1598 "pid":4759{}
1599 "msg":"{"msg":"[1] Transition from Faulted to LiveRepairReady",[2] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active Active WaitQuorum ds_transition to Active""v",:"0v",:"0,name"":name"":"crucible"crucible,"","level":level"30:30,,""timetime""::""2023-09-22T23:14:58.673905295Z2023-09-22T23:14:58.673905561Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47594759}}
1600 
1601 {{"msg":""msg":"Checking if live repair is needed","v[2] Transition from WaitQuorum to Active"":0,","v":name"0:","cruciblename"":,""cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:14:58.673938603Z2023-09-22T23:14:58.673939444Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
1602 
1603 {{"msg":""msg":"[1] 8228decb-4d72-4851-9eb0-d687894e47e9 (4dbfcf6c-f885-44e1-a38c-eeeb7a138dad) Active LiveRepairReady Active ds_transition to LiveRepair"[1] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active Active Active ds_transition to Faulted",","v"v:":00,,""namename""::""crucible"crucible",","level"level:"30:30,,""timetime""::""2023-09-22T23:14:58.673976414Z2023-09-22T23:14:58.673976138Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:":47594759}}
1604 
1605 {{"msgtest impacted_blocks::test::union_with_empty_is_identity ... ""msg:"":ok[1] Transition from Active to Faulted""
1606 [1] Transition from LiveRepairReady to LiveRepair,""v":,0",v""name"::0","crucible"name",:""level"crucible:"30,"level":30,","time"time:"":"2023-09-22T23:14:58.674027398Z"2023-09-22T23:14:58.674028697Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47594759}}
1607 
16082023-09-22T23:14:58.674ZINFOcrucible: [1] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active Faulted Active ds_transition to LiveRepairReady
16092023-09-22T23:14:58.674ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
16102023-09-22T23:14:58.674ZINFOcrucible: [2] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active LiveRepairReady Active ds_transition to Faulted
16112023-09-22T23:14:58.674ZINFOcrucible: [2] Transition from Active to Faulted
16122023-09-22T23:14:58.674ZINFOcrucible: [2] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active LiveRepairReady Faulted ds_transition to LiveRepairReady
16132023-09-22T23:14:58.674ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
1614 {"msg":"Checking if live repair is needed","v":0,"name":"crucible","level":30,"time":"test live_repair::repair_test::test_check_for_repair_do_repair ... 2023-09-22T23:14:58.674289781Z"ok,"
1615 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
16162023-09-22T23:14:58.674ZINFOcrucible: [1] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active LiveRepairReady LiveRepairReady ds_transition to LiveRepair
16172023-09-22T23:14:58.674ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
16182023-09-22T23:14:58.674ZINFOcrucible: [2] 120e0f16-155b-4b25-9c8c-e48b4995715e (b41d05a7-cb34-4f00-b2d8-90477e36239b) Active LiveRepair LiveRepairReady ds_transition to LiveRepair
16192023-09-22T23:14:58.674ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
1620 {"msg":"Crucible stats registered with UUID: a1d81752-deda-403e-8082-906a34f8182a","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.674593437Z","hostname":"ip-10-150-1-55.us-west-2.compute.internaltest live_repair::repair_test::test_check_for_repair_do_two_repair ... "ok,"
1621 pid":4759}
16222023-09-22T23:14:58.674ZINFOcrucible: Crucible a1d81752-deda-403e-8082-906a34f8182a has session id: 88d926f8-def9-4076-bba6-faf2ecc35512
16232023-09-22T23:14:58.674ZINFOcrucible: Checking if live repair is needed
16242023-09-22T23:14:58.674ZINFOcrucible: a1d81752-deda-403e-8082-906a34f8182a is now active with session: 7ab251f4-fbab-4f08-b351-a01533f527e9
16252023-09-22T23:14:58.674ZINFOcrucible: [0] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) New New New ds_transition to WaitActive
1626 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:14:58.674793383ZCrucible stats registered with UUID: d90599f9-c15d-4f80-b9c1-b848e2e8d476"",",hostname"":v"":0,"ip-10-150-1-55.us-west-2.compute.internal"name",":"pid":crucible"4759,"}level"
1627 :30{"msg":"[0] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,",time":""time":"2023-09-22T23:14:58.674838418Z","2023-09-22T23:14:58.674825568Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1628 {"msg":"hostname":"Crucible d90599f9-c15d-4f80-b9c1-b848e2e8d476 has session id: 1eae53c6-8d73-4b5f-801d-5e6e237195b1","v":ip-10-150-1-55.us-west-2.compute.internal0",",name":"crucible","level":30"pid":4759}
1629 {"msg":",[0] Transition from WaitActive to WaitQuorum""time",:""v":0,2023-09-22T23:14:58.674932173Z""name":","crucible"hostname",:""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1630 {,"time":""msg":2023-09-22T23:14:58.674963935Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal",d90599f9-c15d-4f80-b9c1-b848e2e8d476 is now active with session: 4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3""pid":,4759"v"}:
1631 0,"name":"{crucible",""levelmsg":"":30[0] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:14:58.675002309Z"time":","hostname":"2023-09-22T23:14:58.675011854Z","hostname"ip-10-150-1-55.us-west-2.compute.internal":,""pid":4759ip-10-150-1-55.us-west-2.compute.internal",}"
1632 {pid""msg"::4759"}
1633 [0] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) New New New ds_transition to WaitActive","v":{0,"name":""cruciblemsg"":,""level":30[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:14:58.675070066Ztime":"","hostname"2023-09-22T23:14:58.675076395Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid":}4759
1634 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30}
1635 ,"time":"2023-09-22T23:14:58.675164625Z"{,"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg",:""pid":4759}
1636 {"msg":"[1] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) Active New New ds_transition to WaitActive","v":[0] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) WaitActive New New ds_transition to WaitQuorum0",",name"":"v"crucible:0,"name":"crucible","level":30","level":30,"time":"2023-09-22T23:14:58.675233101Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",{"pid"time"::4759"msg":"Crucible stats registered with UUID: c7bd94b4-dec0-4286-b77a-1e856258195c"}
1637 "{2023-09-22T23:14:58.675240979Z",""hostname"msg:""ip-10-150-1-55.us-west-2.compute.internal",":pid"":4759[0] Transition from WaitActive to WaitQuorum}"
1638 ,{"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30"v":,"0time":,""name":"crucible"2023-09-22T23:14:58.675314474Z",","level":hostname"30,"v":0,"name":"crucible","level":30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}"
1639 time":"2023-09-22T23:14:58.675345702Z","{hostname":""ip-10-150-1-55.us-west-2.compute.internalmsg"":","pid":4759}
1640 {[1] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":""2023-09-22T23:14:58.675408485Z"msg",:",[0] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) WaitQuorum New New ds_transition to Active",""v":hostname0",:""name":"crucibleip-10-150-1-55.us-west-2.compute.internal"""time",":pid"":,"47592023-09-22T23:14:58.675352816Z"level"}:
1641 ,"30hostname":"ip-10-150-1-55.us-west-2.compute.internal{","pid":4759"msg":"}
1642 [1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible"{,,""timelevel""::30"2023-09-22T23:14:58.675468963Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}"
1643 {,"time":"2023-09-22T23:14:58.675493179Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1644 {"msg":"[1] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) Active WaitQuorum New ds_transition to Active","v":0","msgname""::"[0] Transition from WaitQuorum to Active"msg,""v"::"0,"name":""crucible",crucible""Crucible c7bd94b4-dec0-4286-b77a-1e856258195c has session id: ef6e50e6-6ec1-49ef-aac4-a434e5e75199",level","v":0,"name":":crucible"30,"level":30"level":30,"time":"2023-09-22T23:14:58.675579141Z",","time"hostname":","time":"2023-09-22T23:14:58.675588193Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4759","pid":}4759
1645 {:"2023-09-22T23:14:58.675581361Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1646 {"msg":""[0] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) New New New ds_transition to WaitActive"msg":,""v":0,"name":"crucible","level":[1] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active New New ds_transition to WaitActive"30,"v":0,"name":"crucible","level":30}
1647 ,"time":"2023-09-22T23:14:58.675669132Z",{"hostname":",""ip-10-150-1-55.us-west-2.compute.internal"msg,time"""pid::""2023-09-22T23:14:58.675679675Z"[1] Transition from WaitQuorum to Active",","hostname"v":""ip-10-150-1-55.us-west-2.compute.internal":,"4759pid":4759}
1648 }
1649 :0,"name":"crucible","level{":30"{msg":"[1] Transition from New to WaitActive","v":"0,"msg"name":"crucible","level",:"30time":"2023-09-22T23:14:58.675746657Z":,""hostname":"[0] Transition from New to WaitActive"ip-10-150-1-55.us-west-2.compute.internal",",pid"":v"4759:0},
1650 ,""name":"time{"crucible":",""level":2023-09-22T23:14:58.675766432Zmsg30","time":"2023-09-22T23:14:58.675800799Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1651 {"msg":","hostname":"[0] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) WaitActive New New ds_transition to WaitQuorumip-10-150-1-55.us-west-2.compute.internal"","pid,""v"::04759,"name":"}crucible"
1652 {,"level":"30msg":"[1] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active WaitActive New ds_transition to WaitQuorum,"time":"2023-09-22T23:14:58.675863639Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1653 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30","time","v:"":02023-09-22T23:14:58.67589105Z,""","name:"":[2] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30"crucible",,hostname":"ip-10-150-1-55.us-west-2.compute.internal"","level"pid"::475930"}
1654 time":"{2023-09-22T23:14:58.675924754Z"",msg":""hostname":"[0] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) WaitQuorum New New ds_transition to Activeip-10-150-1-55.us-west-2.compute.internal"",","v":pid"0:,,""nametime":"2023-09-22T23:14:58.6759729Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1655 {""msg":":"crucible"[1] Transition from WaitActive to WaitQuorum",",level""v"::300,"name":"crucible","level":475930}
1656 ,"time":"{2023-09-22T23:14:58.676026942Z","hostname":""ip-10-150-1-55.us-west-2.compute.internalmsg"":,""pid,":"4759[2] Transition from New to WaitActive"}time
1657 "{"msg":":"[0] Transition from WaitQuorum to Active","v":02023-09-22T23:14:58.676031673Z,""name":",,""v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.676073211Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1658 hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pidmsg""::"4759}
1659 crucible","level":30,"time":"2023-09-22T23:14:58.67612504Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1660 {{"msg":"[2] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30[1] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,"time":""2023-09-22T23:14:58.6761637Z"msg":","hostname":"[1] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active New New ds_transition to WaitActive"ip-10-150-1-55.us-west-2.compute.internal",","pid"v"::04759,"name":"}crucible
1661 ",,""level":time"30:"2023-09-22T23:14:58.67619352Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1662 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30{"msg":","time":"[2] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.676242401Z",","vhostname":"":0,ip-10-150-1-55.us-west-2.compute.internal"",,""pid":4759}
16632023-09-22T23:14:58.676ZINFOcrucible: [1] Transition from New to WaitActive
1664 {"msg":"time":"2023-09-22T23:14:58.676250459Z","hostname":"[1] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active WaitActive New ds_transition to WaitQuorum","ip-10-150-1-55.us-west-2.compute.internalv":"0,",name":""crucible"pid,""level":30,"time":"2023-09-22T23:14:58.676315746Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1665 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30:4759}
1666 name":"crucible","level":30,"time":"2023-09-22T23:14:58.676355253Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}msg":","time":"2023-09-22T23:14:58.676336673Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1667 {"msg":"[1] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30[2] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.676411276Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1668 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":","crucible"time"
1669 {"msg":"[2] a1d81752-deda-403e-8082-906a34f8182a (7ab251f4-fbab-4f08-b351-a01533f527e9) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible:"","level":2023-09-22T23:14:58.676425028Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1670 ,"time":"{2023-09-22T23:14:58.676480391Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal",,"[2] Transition from New to WaitActivepid"":4759}
1671 {"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:14:58.676525486Z""v":,"0hostname",:""name":""levelip-10-150-1-55.us-west-2.compute.internal"crucible","","level":30pid":4759}
1672 ,"time":"2023-09-22T23:14:58.676554972Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1673 {{"msg":""msg":"Checking if live repair is needed","v":0[2] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active Active WaitActive ds_transition to WaitQuorum",",name":""v":crucible"0,",:"levelname":"30:,"time":"2023-09-22T23:14:58.676613278Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1674 {"msg":""crucible","No Live Repair required at this timelevel"",:"30v":0,30"name":"crucible","level":30,,""timetime""::",""2023-09-22T23:14:58.67665563Z"2023-09-22T23:14:58.676653004Z"time,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1675 {"msg":"[2] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,":time"":"2023-09-22T23:14:58.676650502Z2023-09-22T23:14:58.676696792Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1676 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.676726708Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal",hostname""pid"::"4759}
1677 ip-10-150-1-55.us-west-2.compute.internal{"msg":"","pid":4759[2] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active Active WaitActive ds_transition to WaitQuorum","}v
1678 {":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.676774135Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1679 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible"","level"msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1680 [2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.676837298Z",":hostname":30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1681 ,"time":"2023-09-22T23:14:58.676863179Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1682 "msg":"{"msg":"[2] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active Active WaitQuorum ds_transition to Active","v":0,"name":"[2] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active Active WaitQuorum ds_transition to Active"crucible",",v":"0level":,"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.676903041Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""pid":4759time":"}
1683 2023-09-22T23:14:58.676900226Z"{,""hostname":msg":""[2] Transition from WaitQuorum to Active","v":ip-10-150-1-55.us-west-2.compute.internal0",","pid":4759}
1684 {name":"crucible",""level":msg"30:"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.676985358Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1685 {"msg":"c7bd94b4-dec0-4286-b77a-1e856258195c is now active with session: 148894f2-0fa3-4b3d-a2ba-6c769977de7b","v":0,"name":,""crucible",time""level":30:"2023-09-22T23:14:58.676994516Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","pid"time":":47592023-09-22T23:14:58.677017227Z","hostname":"}
1686 {"msg":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
1687 [1] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active Active Active ds_transition to Faulted","v":{0,""namemsg":"":"crucible","level":30[1] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"test live_repair::repair_test::test_check_for_repair_normal ... time":,"time":"2023-09-22T23:14:58.677106484Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1688 {"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30"2023-09-22T23:14:58.677098906Z",,""time":"hostname":"2023-09-22T23:14:58.677143652Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pid":4759ip-10-150-1-55.us-west-2.compute.internal",}"
1689 {pid":4759}
1690 {"msg":"[1] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible",",level":30,"time":"2023-09-22T23:14:58.677253789Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759"time":"}
1691 ok{
1692 "msg":"[1] d90599f9-c15d-4f80-b9c1-b848e2e8d476 (4bd7dc1d-d4df-4f2e-8daa-2d91d52e68c3) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","2023-09-22T23:14:58.677237011Zlevel"":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1693 {"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:14:58.67734569Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1694 {,"time"":"msg":"2023-09-22T23:14:58.677365797Z","hostname":[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1695 {"msg":"[1] c7bd94b4-dec0-4286-b77a-1e856258195c (148894f2-0fa3-4b3d-a2ba-6c769977de7b) Active LiveRepairReady Active ds_transition to LiveRepair",",v":0","timename":""crucible":,""level":302023-09-22T23:14:58.677432222Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1696 {,"time":""msg2023-09-22T23:14:58.677458485Z"":","hostname":"Checking if live repair is needed"ip-10-150-1-55.us-west-2.compute.internal","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.677523762Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1697 {"msg",:""pid":4759}
1698 Upstairs repair task running, trying again later","v":{0,""name":"crucible","level":40msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.677611738Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1699 ,"time":"2023-09-22T23:14:58.677627058Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1700 {"msg":"Crucible stats registered with UUID: 1cff5a3f-832b-41e6-a1da-ab29a77accd1","v":0,"name":"crucible"test live_repair::repair_test::test_live_repair_deps_after_no_overlap ... ,"oklevel
1701 ":30,"time":"2023-09-22T23:14:58.677861296Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1702 test live_repair::repair_test::test_check_for_repair_task_running ... ok{
1703 "msg":"Crucible 1cff5a3f-832b-41e6-a1da-ab29a77accd1 has session id: d0cad690-982c-473a-b981-845555a75a75","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.677912023Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
17042023-09-22T23:14:58.677ZINFOcrucible: [0] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) New New New ds_transition to WaitActive
17052023-09-22T23:14:58.677ZINFOcrucible: [0] Transition from New to WaitActive
17062023-09-22T23:14:58.678ZINFOcrucible: [0] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) WaitActive New New ds_transition to WaitQuorum
17072023-09-22T23:14:58.678ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
17082023-09-22T23:14:58.678ZINFOcrucible: [0] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) WaitQuorum New New ds_transition to Active
17092023-09-22T23:14:58.678ZINFOcrucible: [0] Transition from WaitQuorum to Active
17102023-09-22T23:14:58.678ZINFOcrucible: [1] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active New New ds_transition to WaitActive
17112023-09-22T23:14:58.678ZINFOcrucible: [1] Transition from New to WaitActive
17122023-09-22T23:14:58.678ZINFOcrucible: [1] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active WaitActive New ds_transition to WaitQuorum
17132023-09-22T23:14:58.678ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
17142023-09-22T23:14:58.678ZINFOcrucible: [1] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active WaitQuorum New ds_transition to Active
1715 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678380746Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
1716 "msg":"{"msg":"Crucible stats registered with UUID: b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5","v":0,"name":"[2] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active Active New ds_transition to WaitActive"crucible",","v":level"0:,"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.678427995Z,"","time":hostname":""2023-09-22T23:14:58.678425306Z"ip-10-150-1-55.us-west-2.compute.internal",,""pid":hostname"4759:"}
1717 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
1718 "msg":"[2] Transition from New to WaitActive"{,"v":0,""msg"name":":"crucible","level":30Crucible b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 has session id: 56decb8e-bf1d-4f14-a929-82d627317444","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678485856Z",","hostname"time"::""2023-09-22T23:14:58.678492714Z"ip-10-150-1-55.us-west-2.compute.internal",","hostnamepid"":":4759}
1719 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
1720 "msg":"{"msg":"[2] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":[0] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) New New New ds_transition to WaitActive"","crucible"v,"":level"0:,30"name":"crucible","level":30,,""time"time":":"2023-09-22T23:14:58.678544128Z"2023-09-22T23:14:58.678547548Z",","hostname":hostname":""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",,""pid"pid"::47594759}}
1721 
1722 {"msg{":""msg":"[0] Transition from New to WaitActive","v":0[2] Transition from WaitActive to WaitQuorum",",name""v"::"0crucible",,""name":level":"30crucible","level":30,"time":"2023-09-22T23:14:58.67860458Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time":","pid":2023-09-22T23:14:58.678607207Z"4759,"}hostname"
1723 :"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759"}msg":
1724 "{"msg":"[0] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"[2] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active Active WaitQuorum ds_transition to Active"crucible",",v":"0level":,30"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678668054Z",",hostname":""time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.678671811Zpid"":4759,"hostname}":
1725 "ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759"msg":"}
1726 [0] Transition from WaitActive to WaitQuorum","v":0,"name":"{crucible","level":"30msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678718932Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1727 ,"time":"{2023-09-22T23:14:58.678727702Z","hostname"":msg"":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1728 [0] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) WaitQuorum New New ds_transition to Active","v":0,"{name":"crucible",""level":msg":"301cff5a3f-832b-41e6-a1da-ab29a77accd1 is now active with session: 67e248db-469f-4db7-bdb8-64747eb67344","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678770879Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1729 ,"time":"{2023-09-22T23:14:58.678780776Z",""hostname"msg":":"[0] Transition from WaitQuorum to Active"ip-10-150-1-55.us-west-2.compute.internal",,""pid":v":47590,"}name"
1730 :"crucible","level":30{"msg":"[1] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active Active Active ds_transition to Faulted","v":0,"name":","crucible"time",:""level":302023-09-22T23:14:58.678822365Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1731 {","msgtime"":":"2023-09-22T23:14:58.678840201Z","hostname":"[1] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active New New ds_transition to WaitActive"ip-10-150-1-55.us-west-2.compute.internal",",v"":pid0",:"4759name":"}crucible
1732 ","level":30{"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678880292Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}{
1733 ","timemsg""::""{2023-09-22T23:14:58.678892055Z"",Crucible stats registered with UUID: 1702975c-d4cc-4dbc-8ef2-d4f28d842baamsg""hostname":,"""v"ip-10-150-1-55.us-west-2.compute.internal:"0,",pid"":name4759":}":crucible
1734 "","level{":30[1] Transition from New to WaitActive""msg":","v":0,"name":"crucible","level":[1] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active Faulted Active ds_transition to LiveRepairReady"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.678927721Z",","hostname":time","":"time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.678936415Z"pid2023-09-22T23:14:58.678941423Z"":,4759","}hostname"
1735 :"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid""msg:":4759""}
1736 ,"pid":{Crucible 1702975c-d4cc-4dbc-8ef2-d4f28d842baa has session id: be7e299b-d8c8-4183-a620-a3152a3ad667"4759","msg"v:"":}0
1737 [1] Transition from Faulted to LiveRepairReady,"",name""v:"":0crucible{,"",name"":"level""crucible:"30,"msg":level":"30[1] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active WaitActive New ds_transition to WaitQuorum","v",":time0",",:""time":name":2023-09-22T23:14:58.678990323Z""","2023-09-22T23:14:58.67899399Zcrucible"hostname"",:"",hostname""ip-10-150-1-55.us-west-2.compute.internal:level"","":pidip-10-150-1-55.us-west-2.compute.internal"":,4759"30}pid"
1738 :4759}
1739 {{"msg"":"msg":"[0] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) New New New ds_transition to WaitActive,"[1] 1cff5a3f-832b-41e6-a1da-ab29a77accd1 (67e248db-469f-4db7-bdb8-64747eb67344) Active LiveRepairReady Active ds_transition to LiveRepair,"","v"timev""::00,,""":namename""::"""cruciblecrucible"",,""2023-09-22T23:14:58.67902287Z"levellevel",:"30:"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1740 ,,""timetime""::""{2023-09-22T23:14:58.679054825Z2023-09-22T23:14:58.679053703Z"",,""hostname""msg":hostname""::"ip-10-150-1-55.us-west-2.compute.internal"","ip-10-150-1-55.us-west-2.compute.internalpid"":,"4759pid"[1] Transition from WaitActive to WaitQuorum":}
1741 4759,"}{
1742 "vmsg":{"":0","[1] Transition from LiveRepairReady to LiveRepairmsg"":name":"","crucible"v[0] Transition from New to WaitActive"":,0",v,"":name"0:,"""namecrucible"":,""levellevelcrucible"":,30"":level":3030,"time":"2023-09-22T23:14:58.679113698Z,"",time"":"hostname":"2023-09-22T23:14:58.679116886Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pid",:"4759ip-10-150-1-55.us-west-2.compute.internal"},
1743 "timepid""::"4759}
1744 2023-09-22T23:14:58.679116552Z","{hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1745 [0] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) WaitActive New New ds_transition to WaitQuorum","v":0{,"name":""crucible"msg,":""level":30,"time":"2023-09-22T23:14:58.679165497Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1746 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.679202035Z",[1] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active WaitQuorum New ds_transition to Active""hostname,"":v"":0,"name":ip-10-150-1-55.us-west-2.compute.internal"","cruciblepid":"4759,"level}"
1747 :30{"msg":"[0] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible",,""leveltime""::"302023-09-22T23:14:58.679236515Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1748 {,""time":"msg":"2023-09-22T23:14:58.679256014Z","[1] Transition from WaitQuorum to Active"hostname":","v":0,"ip-10-150-1-55.us-west-2.compute.internalname":""crucible,""pid",:"4759level":30}
1749 {"msg,":""time":"[0] Transition from WaitQuorum to Active","2023-09-22T23:14:58.679288268Z"v,":"0hostname",":"name":"crucible",ip-10-150-1-55.us-west-2.compute.internal"",level"":pid"30:4759}
1750 {"msg":"[2] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active Active New ds_transition to WaitActive","v":0,","name":time"":"crucible","2023-09-22T23:14:58.67931569Z"level":,"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1751 ,"time":"{2023-09-22T23:14:58.679339294Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1752 [1] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active New New ds_transition to WaitActive","{v":0",msg"":"name":"crucible"[2] Transition from New to WaitActive,""level",:"30v":0,"name":"crucible","level":30,,""time"time":":"2023-09-22T23:14:58.679374175Z"2023-09-22T23:14:58.679378666Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internaltest live_repair::repair_test::test_live_repair_deps_flush_repair_flush ... ",ok"pid
1753 ":4759ip-10-150-1-55.us-west-2.compute.internal","}pid
1754 ":4759}
1755 {{"msg"":"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level"[2] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active Active WaitActive ds_transition to WaitQuorum":,30"v":0,"name":"crucible","level":30,",time"":"time":"2023-09-22T23:14:58.679435343Z",2023-09-22T23:14:58.679440037Z"","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",,""pid"pid"::47594759}
1756 }
1757 {"msg":"{[2] Transition from WaitActive to WaitQuorum"",msg":""v":0,"name":"crucible"[1] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active WaitActive New ds_transition to WaitQuorum",","v":level"0:,"30name":"crucible","level":30,"time":","time":"2023-09-22T23:14:58.679493197Z"2023-09-22T23:14:58.679495224Z",","hostname":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}ip-10-150-1-55.us-west-2.compute.internal"
1758 ,"pid":4759{}
1759 "msg":"{[1] Transition from WaitActive to WaitQuorum","v":"0,msg""name":":crucible"","level":30[2] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active Active WaitQuorum ds_transition to Active","v":0,","nametime":"":"2023-09-22T23:14:58.67953667Z"crucible",",hostname":""level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4759}
1760 {"msg":"[1] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active WaitQuorum New ds_transition to Active",,""v":time"0:,""name":"crucible","level2023-09-22T23:14:58.679553041Z"":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1761 ,"time":"{2023-09-22T23:14:58.67957117Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1762 [2] Transition from WaitQuorum to Active",{"v"":msg0":","name":[1] Transition from WaitQuorum to Active"","v":crucible"0,,""name":level"":crucible"30,"level":30,"time":"2023-09-22T23:14:58.679605946Z",,""hostname":time"":"ip-10-150-1-55.us-west-2.compute.internal","pid2023-09-22T23:14:58.679605162Z"":4759,"}
1763 hostname":"{"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1764 [2] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active Active New ds_transition to WaitActive","v":0,"name":"{crucible","level":30"msg":","timeb0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 is now active with session: 1a550ef9-f088-45b5-a367-3199209f5bd1"":","v"2023-09-22T23:14:58.679636751Z":,0","hostname":"name":"ip-10-150-1-55.us-west-2.compute.internalcrucible"",,""pid":level":475930}
1765 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time",:""time":"2023-09-22T23:14:58.679666001Z"2023-09-22T23:14:58.679658071Z",",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1766 ip-10-150-1-55.us-west-2.compute.internal",{"pid"":msg4759":"}
1767 [2] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible"{,"level":30"msg":","time":"2023-09-22T23:14:58.679696742Z"[1] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active Active Active ds_transition to Faulted",","hostname":"v":0ip-10-150-1-55.us-west-2.compute.internal",",pid":"4759name}"
1768 :"{crucible"","msg":"level":30[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.67972546Z","hostname":",ip-10-150-1-55.us-west-2.compute.internal"","time"pid"::"4759}
1769 2023-09-22T23:14:58.679722289Z"{,""hostnamemsg":"":"ip-10-150-1-55.us-west-2.compute.internal","[2] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active Active WaitQuorum ds_transition to Active"pid",":v4759":0,"name":"}crucible"
1770 ,"level":30{"msg":","time":"[1] Transition from Active to Faulted"2023-09-22T23:14:58.679761194Z",,""hostnamev":"0:","name":"ip-10-150-1-55.us-west-2.compute.internal","crucible"pid":,4759"}level"
1771 :30{"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.679788773Z,"","timehostname":"":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:14:58.679783345Z"4759,}
1772 "hostname":{""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":47591702975c-d4cc-4dbc-8ef2-d4f28d842baa is now active with session: baac46cc-c0a4-4a68-b7fe-469cb936213f","}v":0
1773 ,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:14:58.679822704Z","hostname":"[1] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active Faulted Active ds_transition to LiveRepairReady"ip-10-150-1-55.us-west-2.compute.internal",,""vpid":":47590}
1774 ,"name{":""msgcrucible"":","level":30[1] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.679851484Z",","hostname"time":":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.679847003Z,""pid":,4759"}hostname"
1775 :"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":"}
1776 [1] Transition from Active to Faulted","v":0{,"name":"crucible"","msg"level"::"30[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.67989964Z","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.679907111Z,""pid":,4759"hostname}"
1777 :"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":"}
1778 [1] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active Faulted Active ds_transition to LiveRepairReady{","v":0","msgname":""crucible":,""level":30[1] b0d4bfe3-554b-45f1-b2c7-a87fcc965ee5 (1a550ef9-f088-45b5-a367-3199209f5bd1) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,","name"time":":"2023-09-22T23:14:58.67995029Z"crucible,""hostname":,""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1779 {"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible",",level"":30time":"2023-09-22T23:14:58.679968358Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,""time,":""2023-09-22T23:14:58.67998093Z"pid,""hostname:":"4759}ip-10-150-1-55.us-west-2.compute.internal"
1780 ,"pid":4759}
1781 {{""msg":msg":""[1] Transition from LiveRepairReady to LiveRepair","[1] 1702975c-d4cc-4dbc-8ef2-d4f28d842baa (baac46cc-c0a4-4a68-b7fe-469cb936213f) Active LiveRepairReady Active ds_transition to LiveRepair"v",":v":00,",name":""crucible"name",":level":"30crucible","level":30,"time":"2023-09-22T23:14:58.680025966Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time,"":"pid":47592023-09-22T23:14:58.680028527Z}"
1782 ,"hostname":"{"ip-10-150-1-55.us-west-2.compute.internal"msg{":,""pid"":msg[1] Transition from LiveRepairReady to LiveRepair"":","4759v":}0Crucible stats registered with UUID: 9389be7a-e733-4e8f-a375-dbab99314661,""
1783 name,"":"v":crucible"0,","level"name:":30"crucible","level":30,"time":"2023-09-22T23:14:58.680071575Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal:"","pid":47592023-09-22T23:14:58.680074242Z"},
1784 "hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
17852023-09-22T23:14:58.680ZINFOcrucible: Crucible 9389be7a-e733-4e8f-a375-dbab99314661 has session id: 51553c06-313b-486e-a473-4e2fb6b5da09
17862023-09-22T23:14:58.680ZINFOcrucible: [0] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) New New New ds_transition to WaitActive
17872023-09-22T23:14:58.680ZINFOcrucible: [0] Transition from New to WaitActive
17882023-09-22T23:14:58.680ZINFOcrucible: [0] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) WaitActive New New ds_transition to WaitQuorum
1789 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30test live_repair::repair_test::test_live_repair_deps_mix ... ok,"
1790 time":"2023-09-22T23:14:58.680274506Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
17912023-09-22T23:14:58.680ZINFOcrucible: [0] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) WaitQuorum New New ds_transition to Active
17922023-09-22T23:14:58.680ZINFOcrucible: [0] Transition from WaitQuorum to Active
1793 {"msg":"test live_repair::repair_test::test_live_repair_deps_no_overlap ... [1] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active New New ds_transition to WaitActive"ok,
1794 "v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.680365339Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
17952023-09-22T23:14:58.680ZINFOcrucible: [1] Transition from New to WaitActive
17962023-09-22T23:14:58.680ZINFOcrucible: [1] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active WaitActive New ds_transition to WaitQuorum
17972023-09-22T23:14:58.680ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
17982023-09-22T23:14:58.680ZINFOcrucible: [1] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active WaitQuorum New ds_transition to Active
17992023-09-22T23:14:58.680ZINFOcrucible: [1] Transition from WaitQuorum to Active
18002023-09-22T23:14:58.680ZINFOcrucible: [2] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active Active New ds_transition to WaitActive
18012023-09-22T23:14:58.680ZINFOcrucible: [2] Transition from New to WaitActive
18022023-09-22T23:14:58.680ZINFOcrucible: [2] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active Active WaitActive ds_transition to WaitQuorum
18032023-09-22T23:14:58.680ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
18042023-09-22T23:14:58.680ZINFOcrucible: [2] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active Active WaitQuorum ds_transition to Active
1805 {"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.680784422Z","{hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}
1806 msg":"{Crucible stats registered with UUID: e55b2cbb-f658-4df1-a2c6-bb10a7952e80""msg":","v":0,"name":"9389be7a-e733-4e8f-a375-dbab99314661 is now active with session: 2b861982-700b-4629-9a70-6e31dee46671crucible"",",level"":v":300,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.680829561Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":,""pid":47592023-09-22T23:14:58.680825827Z"}
1807 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid""msg"::4759"}
1808 [1] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active Active Active ds_transition to Faulted","v":0,"name":"{crucible","level":30"msg":"Crucible e55b2cbb-f658-4df1-a2c6-bb10a7952e80 has session id: 26164dc7-fc5c-47ae-b2e3-73a50f49b4e3","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.680882958Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1809 ,"time":"{2023-09-22T23:14:58.680894064Z"","msg":hostname"":"[1] Transition from Active to Faulted"ip-10-150-1-55.us-west-2.compute.internal,"",v""pid"::47590,"}name
1810 ":"crucible","level":30{"msg":"[0] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) New New New ds_transition to WaitActive","v":,"0time":","name":"2023-09-22T23:14:58.680933367Z"crucible",","hostnamelevel""::30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1811 ,"{time":""2023-09-22T23:14:58.680955525Z"msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active Faulted Active ds_transition to LiveRepairReady"},
1812 "v":0,"name":"crucible"{,"level":30"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.680994116Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1813 ,"time":"2023-09-22T23:14:58.681004984Z{","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","[1] Transition from Faulted to LiveRepairReadypid"":,"4759v":0},
1814 "name":"crucible","level":{30"msg":"[0] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681047316Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1815 ,"time":"{2023-09-22T23:14:58.681061556Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1816 [1] 9389be7a-e733-4e8f-a375-dbab99314661 (2b861982-700b-4629-9a70-6e31dee46671) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"{crucible","level":30"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681100963Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1817 ,"time":"{2023-09-22T23:14:58.681110421Z","hostname""{msg":":""msg[1] Transition from LiveRepairReady to LiveRepair"":"ip-10-150-1-55.us-west-2.compute.internal",,""Crucible stats registered with UUID: ce2db535-1138-455f-9805-280fa31ce378v"":pid",0",v""name:"0:,"":namecrucible"":","cruciblelevel"",:"304759level":30}
1818 {,""time":"msg":"2023-09-22T23:14:58.681157801Z",,""time"hostname"::""2023-09-22T23:14:58.68115977Z"ip-10-150-1-55.us-west-2.compute.internal,"",[0] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) WaitQuorum New New ds_transition to Active"hostname"":pid,""":v"4759ip-10-150-1-55.us-west-2.compute.internal":,}"
1819 pid"0:4759,"name":"}
1820 crucible","level":{30"msg":"Crucible ce2db535-1138-455f-9805-280fa31ce378 has session id: 44873b2a-74af-4682-8e7e-bc088fb77067","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681203553Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",":time"4759:"}
1821 2023-09-22T23:14:58.681215832Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}msg
1822 ":"[0] Transition from WaitQuorum to Active"{,"v":0","msg"name":":"crucible","level":30[0] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681262188Z","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.681270436Z","pid,"":hostname4759":"}
1823 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1824 {{"msg":""msg":"[0] Transition from New to WaitActive","[1] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active New New ds_transition to WaitActivev":"0,,""namev""::"0crucible",,""level"name":":crucible"30,"level":30,"time":"2023-09-22T23:14:58.681328876Z",",hostname"":"time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.681330776Z,""pid":4759,"hostname"}:
1825 "ip-10-150-1-55.us-west-2.compute.internal","pid"{:4759}"
1826 msg":"{[0] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) WaitActive New New ds_transition to WaitQuorum""msg",:""v":0[1] Transition from New to WaitActive",","name"v":":crucible"0,,""namelevel"":":crucible"30,"level":30,"time":"2023-09-22T23:14:58.681396445Z",",time"":hostname":""ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.681398257Z,""pid":4759,"hostname"}:
1827 "ip-10-150-1-55.us-west-2.compute.internal","{pid":4759"}msg":
1828 "[0] Transition from WaitActive to WaitQuorum","v"{:0,"name":"crucible""msg":,""level":30[1] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucibletest live_repair::repair_test::test_live_repair_deps_reads ... ",ok
1829 ,""leveltime":"":302023-09-22T23:14:58.681454456Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1830 {"msg":"[0] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.68148364Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","timepid"":":47592023-09-22T23:14:58.681499838Z","}hostname":
1831 "ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1832 {{"msg":""msg":"[1] Transition from WaitActive to WaitQuorum"[0] Transition from WaitQuorum to Active",",v"":v":00,,""name"name:"":crucible"","cruciblelevel"":,"30level":30,"time":"2023-09-22T23:14:58.681549288Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":,""pid":47592023-09-22T23:14:58.681550468Z"}
1833 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"msg":"}
1834 [1] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active New New ds_transition to WaitActive","v":0,"name":"{crucible","level":30"msg":"[1] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681594939Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1835 ,"time":"{2023-09-22T23:14:58.681607867Z"","msg":hostname":""ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from New to WaitActive",",pid"":v":47590,"name}"
1836 :"crucible","level":30{"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681650374Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1837 ,"time":"{2023-09-22T23:14:58.68166212Z","hostname"":msg"":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1838 [1] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active WaitActive New ds_transition to WaitQuorum","v":0,"{name":"crucible",""level"msg:"30:"[2] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681704421Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1839 ,"time":"2023-09-22T23:14:58.681715783Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":[1] Transition from WaitActive to WaitQuorum4759","}v":
1840 0,"name":"crucible","level"{:30"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.681754173Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1841 ,"time{":"2023-09-22T23:14:58.681765905Z""msg":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active WaitQuorum New ds_transition to Active"}
1842 ,"v":0,"name":"{crucible","level":30"msg":"[2] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.68180374Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1843 ,{"time":""msg":"2023-09-22T23:14:58.681816857Z","hostname":[1] Transition from WaitQuorum to Active"","v":0ip-10-150-1-55.us-west-2.compute.internal",,""pid"name"::4759"crucible"},
1844 "level":30{"msg":"[2] Transition from WaitActive to WaitQuorum","v":,"0time",:""name":"2023-09-22T23:14:58.681854969Z"crucible",","hostname":"level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1845 {"msg":","time":[2] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active Active New ds_transition to WaitActive"","v":02023-09-22T23:14:58.681875563Z","name":","crucible"hostname,""level"::"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1846 {,""timemsg":"":"2023-09-22T23:14:58.681903757Z","hostname":"[2] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active Active WaitQuorum ds_transition to Activeip-10-150-1-55.us-west-2.compute.internal"","pid",":v"4759:0},"
1847 name":"crucible","level":30{"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.68194846Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1848 ,"time":"2023-09-22T23:14:58.681960757Z"{,"hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":{","pid":4759[2] Transition from WaitQuorum to Active"},
1849 ""v"msg":{:0",""name"msg"::Crucible stats registered with UUID: 8eddf9f4-e32a-4516-96f5-18a954e25241"""crucible",,""v"level:[2] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active Active WaitActive ds_transition to WaitQuorum"0",:,30""namev":":"0crucible,"",name":""level":crucible"30,"level":30,"time":"2023-09-22T23:14:58.682017617Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1850 ,"time":"{2023-09-22T23:14:58.68203163Z",","time""hostname":"msg":":ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.682029986Z,""",pid"":hostname4759e55b2cbb-f658-4df1-a2c6-bb10a7952e80 is now active with session: 10373868-88fa-495a-b411-62c3231f0270""}:
1851 ","v":0{,"ip-10-150-1-55.us-west-2.compute.internal"name",""pidmsg"":"::crucible""4759,"level":[2] Transition from WaitActive to WaitQuorum"30},
1852 "v":0,"name":"crucible","{level":30"msg":","time":"Crucible 8eddf9f4-e32a-4516-96f5-18a954e25241 has session id: a10d4b49-4ccf-45c2-853d-34a55302e6802023-09-22T23:14:58.682092229Z"",,""hostnamev,"":"0:,"time"":ip-10-150-1-55.us-west-2.compute.internal"name,""":"pidcrucible"2023-09-22T23:14:58.682104914Z"":,4759","hostname"level:}"
1853 ":30ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
1854 "msg":"{"msg":"[1] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active Active Active ds_transition to Faulted",",time"":v":[2] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active Active WaitQuorum ds_transition to Active"0,",name"":"v":crucible0",",name":""cruciblelevel"":,"30level":30"2023-09-22T23:14:58.682148231Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1855 ,"time":",{"time"2023-09-22T23:14:58.682183867Z":"","msg2023-09-22T23:14:58.68218282Z"":",hostname":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pidip-10-150-1-55.us-west-2.compute.internal"[0] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) New New New ds_transition to WaitActive"",:,""vpid":47594759":}0}
1856 
1857 ,{"name":"crucible",{""msglevel""":msg":"":30[1] Transition from Active to Faulted"[2] Transition from WaitQuorum to Active",","vv""::00,,""namename"":":"crucible"crucible",,,""level"time"::"30"level":2023-09-22T23:14:58.682251102Z30","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1858 {"msg":",",time[0] Transition from New to WaitActive""",:"time":""2023-09-22T23:14:58.682274192Z"v":,02023-09-22T23:14:58.682276895Z,""hostname",:"""name":ip-10-150-1-55.us-west-2.compute.internal"","hostname":"crucible"pid",ip-10-150-1-55.us-west-2.compute.internal":",level":"304759pid":4759}
1859 }
1860 {,"time":""{2023-09-22T23:14:58.682324192Z"msg":",""hostname":"msg":"ce2db535-1138-455f-9805-280fa31ce378 is now active with session: cf305ba7-b9a0-443c-94d6-97299bd8db90"ip-10-150-1-55.us-west-2.compute.internal",,""v"pid:"0:[1] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active Faulted Active ds_transition to LiveRepairReady",",name"4759:"}"
1861 v":crucible0"{,,""level"name"":msg":"":crucible"30,"level":30[0] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":",",time":"2023-09-22T23:14:58.682377278Z""2023-09-22T23:14:58.68237907Z"time",:,"""hostname"hostname"2023-09-22T23:14:58.682386822Z":":","ip-10-150-1-55.us-west-2.compute.internalhostnameip-10-150-1-55.us-west-2.compute.internal""",,:"""pid"pid"ip-10-150-1-55.us-west-2.compute.internal:":,47594759"pid}}"
1862 
1863 :{4759}
1864 "{msg":"{""msg"msg"::[1] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active Active Active ds_transition to Faulted""","v":0[0] Transition from WaitActive to WaitQuorum,[1] Transition from Faulted to LiveRepairReady"""name",:"",v"":v":crucible"00,,""level","name"name":::""30crucible"crucible",",level":"30level":30,,""time":time"":"2023-09-22T23:14:58.682490893Z"2023-09-22T23:14:58.682484953Z,,"""hostname",":hostnametime"":"":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.682490231Z"ip-10-150-1-55.us-west-2.compute.internal",,",pid"":"pid"hostname"::47594759"}
1865 ip-10-150-1-55.us-west-2.compute.internal}"
1866 {,"pid":4759"msg":{"}
1867 [0] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) WaitQuorum New New ds_transition to Active"",msg":""{v"[1] Transition from Active to Faulted":,"0",v":msg"0",name":name""::""crucible"","cruciblelevel"":,"30level"[1] e55b2cbb-f658-4df1-a2c6-bb10a7952e80 (10373868-88fa-495a-b411-62c3231f0270) Active LiveRepairReady Active ds_transition to LiveRepair":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.682578931Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":","pid":47592023-09-22T23:14:58.682581295Z"}
1868 ,","hostname"{:time""":msg":"ip-10-150-1-55.us-west-2.compute.internal"","pid":2023-09-22T23:14:58.682587359Z4759[1] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active Faulted Active ds_transition to LiveRepairReady"","}v,"
1869 hostname""::"{0,"name":"crucible"ip-10-150-1-55.us-west-2.compute.internal"",msg","level":pid30""::"4759}[0] Transition from WaitQuorum to Active"
1870 ,,""time":"v":{2023-09-22T23:14:58.682635081Z"0,","hostname":"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible"","msg":",pid":"4759[1] Transition from LiveRepairReady to LiveRepair"level","}v"::030
1871 ,"name":"{crucible""msg":","level":[1] Transition from Faulted to LiveRepairReady"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.682671892Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time,"":"pid":47592023-09-22T23:14:58.682684234Z","}hostname":","
1872 time"ip-10-150-1-55.us-west-2.compute.internal":,""{pid":47592023-09-22T23:14:58.682681791Z"}",
1873 "msg{"hostname":":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}[1] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active New New ds_transition to WaitActive[1] ce2db535-1138-455f-9805-280fa31ce378 (cf305ba7-b9a0-443c-94d6-97299bd8db90) Active LiveRepairReady Active ds_transition to LiveRepair"
1874 ","v":0,","v"name":":crucible"0,",level":30"name":"crucible","level":30,"time":"2023-09-22T23:14:58.682742197Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1875 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v",":0time",":name":""crucible","level":302023-09-22T23:14:58.682746606Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}2023-09-22T23:14:58.6827666Z"
1876 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","{pid":4759}
1877 "msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.682796219Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
18782023-09-22T23:14:58.682ZINFOcrucible: [1] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active WaitActive New ds_transition to WaitQuorum
18792023-09-22T23:14:58.682ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
18802023-09-22T23:14:58.682ZINFOcrucible: [1] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active WaitQuorum New ds_transition to Active
18812023-09-22T23:14:58.682ZINFOcrucible: [1] Transition from WaitQuorum to Active
1882 {"msg":"[2] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active Active New ds_transition to WaitActive","v":0,"name":"crucibletest live_repair::repair_test::test_live_repair_deps_repair_flush_repair ... "ok
1883 ,"level":30,"time":"2023-09-22T23:14:58.683025957Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1884 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683065844Z","hostname":"test live_repair::repair_test::test_live_repair_deps_repair_flush ... ip-10-150-1-55.us-west-2.compute.internalok
1885 ","pid":4759}
18862023-09-22T23:14:58.683ZINFOcrucible: [2] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active Active WaitActive ds_transition to WaitQuorum
18872023-09-22T23:14:58.683ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
18882023-09-22T23:14:58.683ZINFOcrucible: [2] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active Active WaitQuorum ds_transition to Active
18892023-09-22T23:14:58.683ZINFOcrucible: [2] Transition from WaitQuorum to Active
18902023-09-22T23:14:58.683ZINFOcrucible: 8eddf9f4-e32a-4516-96f5-18a954e25241 is now active with session: 7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4
18912023-09-22T23:14:58.683ZINFOcrucible: [1] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active Active Active ds_transition to Faulted
18922023-09-22T23:14:58.683ZINFOcrucible: [1] Transition from Active to Faulted
18932023-09-22T23:14:58.683ZINFOcrucible: [1] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active Faulted Active ds_transition to LiveRepairReady
18942023-09-22T23:14:58.683ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
18952023-09-22T23:14:58.683ZINFOcrucible: [1] 8eddf9f4-e32a-4516-96f5-18a954e25241 (7c3b4d36-4dbe-4df1-b74a-ee41370cd9e4) Active LiveRepairReady Active ds_transition to LiveRepair
18962023-09-22T23:14:58.683ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
1897 {{"msg":""Write to Extent 1:2:9 under repair"msg,""v":":0,"name":"crucible","Crucible stats registered with UUID: be858f8c-b2ed-4658-ae55-df51207082cblevel"":40,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683549173Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1898 ,"time":"2023-09-22T23:14:58.683556758Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1899 {{"msg":""Crucible be858f8c-b2ed-4658-ae55-df51207082cb has session id: 7c583b93-3217-4cbe-8866-c7e99e1310b0"msg",":v"":0,"name":"crucible"Crucible stats registered with UUID: 7070e84b-45e9-4b98-b03f-656a6ba86de7,""level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683620584Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1900 ,"time":"2023-09-22T23:14:58.683626941Z"{,"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg",":pid"":4759}
1901 [0] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) New New New ds_transition to WaitActive","v":0,"{name":"crucible",""level"msg"::"30Crucible 7070e84b-45e9-4b98-b03f-656a6ba86de7 has session id: 65505822-2360-4a3c-803c-47f082ed4b20","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683688224Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1902 ,"time":"{2023-09-22T23:14:58.683697918Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal"[0] Transition from New to WaitActive",","pid"v"::47590,"name":}"
1903 crucible","level":30{"msg":"[0] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683741075Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1904 ,"time":"{2023-09-22T23:14:58.683756719Z",""hostnamemsg"":":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1905 [0] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) WaitActive New New ds_transition to WaitQuorum","v":0,"{name":"crucible","level":"30msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683797028Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time",":"pid":47592023-09-22T23:14:58.683805054Z"},
1906 "hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759"msg":"}
1907 [0] Transition from WaitActive to WaitQuorum","v":0,"name"{:"crucible","level":"30msg":"[0] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.68385249Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1908 ,"time":"2023-09-22T23:14:58.683864342Z"{,"hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":","pid":4759}
1909 [0] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) WaitQuorum New New ds_transition to Active","v":0{,"name":"crucible"",msg"":"level":30[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683907676Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time":","pid":2023-09-22T23:14:58.683914555Z"4759,"}hostname"
1910 :"ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
1911 "msg":"[0] Transition from WaitQuorum to Active"{,"v":0,""msg":"name":"crucible","level":30[0] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.683964255Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,""time,""pid"::"47592023-09-22T23:14:58.683971726Z}"
1912 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"msg"}:
1913 "{"[1] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active New New ds_transition to WaitActive"msg":","v":0[0] Transition from WaitQuorum to Active",","namevtest live_repair::repair_test::test_live_repair_deps_repair_kitchen_sink ... ":ok0
1914 ,""name:"":"crucible"crucible",",level":"30level":30,","time":"time":"2023-09-22T23:14:58.684045377Z"2023-09-22T23:14:58.684046321Z",","hostname":"hostname":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",,""pidpid""::47594759}
1915 }
1916 {"msg":"{[1] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active New New ds_transition to WaitActive",""v":msg0",:""name":"crucible","level[1] Transition from New to WaitActive"":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684104238Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1917 {"msg":,""time":"[1] Transition from New to WaitActive","2023-09-22T23:14:58.684109802Z"v":,0","hostname":"name":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level":,"30pid":4759}
1918 ,"time":"2023-09-22T23:14:58.684140613Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1919 "msg":{""msg":"[1] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active WaitActive New ds_transition to WaitQuorum"[1] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active WaitActive New ds_transition to WaitQuorum","v":0,",v"":name":"0crucible",","namelevel":":"30crucible","level":30,"time":"2023-09-22T23:14:58.684179407Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1920 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684182545Z","hostname":","time":"2023-09-22T23:14:58.684198784Z"ip-10-150-1-55.us-west-2.compute.internal,"",hostname":""pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid":4759}}
1921 
1922 {{"msg":""msg":"[1] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active WaitQuorum New ds_transition to Active","v":0,"[1] Transition from WaitActive to WaitQuorum"name":","cruciblev":","0level":30,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684234937Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1923 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684239246Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal",2023-09-22T23:14:58.684253645Z"","pidhostname":"":4759ip-10-150-1-55.us-west-2.compute.internal","pid":4759}}
1924 
1925 {{"msg":""msg":"[2] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30[1] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible",",time"":"level":2023-09-22T23:14:58.684284201Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1926 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,","time":"time":"2023-09-22T23:14:58.684306009Z"2023-09-22T23:14:58.684295611Z","hostname":","hostnameip-10-150-1-55.us-west-2.compute.internal","":pid":"4759}
1927 ip-10-150-1-55.us-west-2.compute.internal","{pid":"4759msg":"}
1928 [2] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{"msg":"[1] Transition from WaitQuorum to Active,""time":","v"2023-09-22T23:14:58.684338122Z":,"0hostname",:""name":"ip-10-150-1-55.us-west-2.compute.internal","cruciblepid":"4759,"}level":
1929 30{"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684363217Z","hostname":","timeip-10-150-1-55.us-west-2.compute.internal"",":"pid":47592023-09-22T23:14:58.68437373Z","hostname}":"
1930 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1931 {{"msg":""msg":"[2] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","[2] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active Active New ds_transition to WaitActivelevel":"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684415318Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1932 ,"time":"{2023-09-22T23:14:58.684420145Z"",msg"":"hostname":"[2] Transition from WaitQuorum to Active",ip-10-150-1-55.us-west-2.compute.internal"","v"pid"::04759,"name":"}crucible
1933 ","level":30{"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible",","leveltime":"":302023-09-22T23:14:58.684462483Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1934 {"msg":"7070e84b-45e9-4b98-b03f-656a6ba86de7 is now active with session: f1f39d9e-3c69-4dbe-901b-424816224ade","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:14:58.684478737Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",time":""pid"2023-09-22T23:14:58.684496615Z:"4759,"hostname":"}
1935 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1936 {"{msg":""msg":"[2] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active Active WaitActive ds_transition to WaitQuorum","v"[1] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active Active Active ds_transition to Faulted":,"0v":,0","name"name{":":"crucible"crucible"",","msglevel""::"30level":30Crucible stats registered with UUID: e2411e4d-ee59-47a2-bfc3-972c6325e258","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684550675Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":,""pid":47592023-09-22T23:14:58.684552206Z"}
1937 ,",time"{:"""hostname"msg:"2023-09-22T23:14:58.684560327Z"":","ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from Active to Faultedhostname,"":",""pidvip-10-150-1-55.us-west-2.compute.internal"":,"0",pid""name:"4759:":crucible}"
1938 ,"4759level":30{}
1939 "msg":"{,"Crucible e2411e4d-ee59-47a2-bfc3-972c6325e258 has session id: 2a5689ec-7f51-4a8b-9e84-e207c2db9743"time":","v""2023-09-22T23:14:58.684604515Z:"0msg":",,""namehostname""::[2] Transition from WaitActive to WaitQuorum""","crucible"ip-10-150-1-55.us-west-2.compute.internal,""vlevel,"":pid30":"4759:}0
1940 ,"name":"{crucible,"""msg,"time:"":""level":2023-09-22T23:14:58.684629502Z"30,"hostname":"[1] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active Faulted Active ds_transition to LiveRepairReady","ip-10-150-1-55.us-west-2.compute.internalv"",:"0pid,"":4759name":"}
1941 crucible","level"{:30"msg":"[0] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) New New New ds_transition to WaitActive",,"",vtime""":time0",:""name":":2023-09-22T23:14:58.684656533Z""crucible,"",2023-09-22T23:14:58.684646725Z"hostname"":"level,":ip-10-150-1-55.us-west-2.compute.internal30"","hostname":"pid":4759}
1942 ip-10-150-1-55.us-west-2.compute.internal","pid"{,""time"msg:"":":2023-09-22T23:14:58.684680657Z4759"[1] Transition from Faulted to LiveRepairReady",","hostnamev""::"0},"
1943 ip-10-150-1-55.us-west-2.compute.internalname"":,""pidcrucible"":,4759"level"}:
1944 30{{"msg"":msg"":"[0] Transition from New to WaitActive","v,"":0time",:""name":"[2] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active Active WaitQuorum ds_transition to Active2023-09-22T23:14:58.684711734Zcrucible""",,"",levelhostname"":"30:"v":0,ip-10-150-1-55.us-west-2.compute.internal"","name":"pid"crucible:,4759""}time,":
1945 ""level":2023-09-22T23:14:58.684730791Z{"30,""msg":hostname"":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1946 [1] 7070e84b-45e9-4b98-b03f-656a6ba86de7 (f1f39d9e-3c69-4dbe-901b-424816224ade) Active LiveRepairReady Active ds_transition to LiveRepair","v{":0,""namemsg""::""crucible","level":30[0] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) WaitActive New New ds_transition to WaitQuorum","v":,"0time,"",name"":"time"crucible:":,"""level"2023-09-22T23:14:58.684760777Z:"30,2023-09-22T23:14:58.684746911Z""hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pidip-10-150-1-55.us-west-2.compute.internal"",",:"4759pid"}time
1947 ":":{47592023-09-22T23:14:58.684776031Z"",msg"":hostname"":"}
1948 [1] Transition from LiveRepairReady to LiveRepairip-10-150-1-55.us-west-2.compute.internal"",,""pidv""::47590,"}
1949 name"{{:"crucible"",msg"":"level":"30msg[0] Transition from WaitActive to WaitQuorum"":","v":0,[2] Transition from WaitQuorum to Active""name":,""crucible,"",time""level"::"30v":2023-09-22T23:14:58.684811651Z"0,","hostname"name":":"crucible",ip-10-150-1-55.us-west-2.compute.internal,"","time"pid:"":"47592023-09-22T23:14:58.684825306Z"},
1950 "level"hostname"::"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1951 {"msg":"[0] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) WaitQuorum New New ds_transition to Active","v":0,",name":""crucible"time":","level":2023-09-22T23:14:58.684845033Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1952 ,"time":"2023-09-22T23:14:58.684869216Z"{,"hostname":""msg":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
1953 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name"be858f8c-b2ed-4658-ae55-df51207082cb is now active with session: e05bbd9e-fb85-4807-b00a-a7affda3fe87:""crucible","level":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684957595Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1954 {"msg":"[1] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.684966876Z",",time":""hostname2023-09-22T23:14:58.684985327Z"":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid":"4759,}"
1955 pid":{4759"msg":"}
1956 [1] Transition from New to WaitActive","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:14:58.685019073Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1957 [1] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active Active Active ds_transition to Faulted"{,"v":"0msg":","name":"crucible","level[1] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active WaitActive New ds_transition to WaitQuorum"":,"v30":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.685047245Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",pid":"4759time}"
1958 :"{2023-09-22T23:14:58.685045079Z",""msg":"hostname":"[1] Transition from WaitActive to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal,"",v"":pid":04759,"name":"}
1959 crucible","level":30{"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:14:58.685088151Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1960 {,""msg":"time":"2023-09-22T23:14:58.685099476Z","[1] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active WaitQuorum New ds_transition to Active"hostname":","v":0,"name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",",level":"30pid":4759}
1961 {"msg":","time":"2023-09-22T23:14:58.685125188Z"[1] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active Faulted Active ds_transition to LiveRepairReady",","hostname":v"":0,"nametest live_repair::repair_test::test_live_repair_deps_repair_overlappers ... ip-10-150-1-55.us-west-2.compute.internal"",ok"pid
1962 "::4759"crucible"},"
1963 level":30{"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.685222984Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1964 {"msg":"[2] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:14:58.685166802Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid,""time"::"47592023-09-22T23:14:58.68526541Z",}"
1965 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1966 {{"msg":""msg":[2] Transition from New to WaitActive"","v":0,"name":"crucible",[1] Transition from Faulted to LiveRepairReady""level":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.685300691Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1967 {"msg":"[2] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active Active WaitActive ds_transition to WaitQuorum","v":0,,""name":"time":crucible"","level":302023-09-22T23:14:58.685307231Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}2023-09-22T23:14:58.685326071Z
1968 ","hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759}
1969 "msg{":""msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30[1] be858f8c-b2ed-4658-ae55-df51207082cb (e05bbd9e-fb85-4807-b00a-a7affda3fe87) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible",",time"":"level":2023-09-22T23:14:58.685357854Z30","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1970 {"msg":"[2] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active Active WaitQuorum ds_transition to Active","v":0,"name,"":"timecrucible"",":level":30"2023-09-22T23:14:58.685368687Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",time":""pid":2023-09-22T23:14:58.685386228Z"4759,"hostname":"}
1971 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1972 {{"msg"":"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"[1] Transition from LiveRepairReady to LiveRepair"crucible",,""v"level":30:0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.685424378Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1973 {"msg":","time":e2411e4d-ee59-47a2-bfc3-972c6325e258 is now active with session: d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09"","v":0,"name":"2023-09-22T23:14:58.685429243Zcrucible"",",level":"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1974 ,"time":"2023-09-22T23:14:58.685450085Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
19752023-09-22T23:14:58.685ZINFOcrucible: [1] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active Active Active ds_transition to Faulted
19762023-09-22T23:14:58.685ZINFOcrucible: [1] Transition from Active to Faulted
19772023-09-22T23:14:58.685ZINFOcrucible: [1] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active Faulted Active ds_transition to LiveRepairReady
19782023-09-22T23:14:58.685ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
19792023-09-22T23:14:58.685ZINFOcrucible: [1] e2411e4d-ee59-47a2-bfc3-972c6325e258 (d6d84fee-a1eb-4f10-bf73-cb08f9fa6e09) Active LiveRepairReady Active ds_transition to LiveRepair
19802023-09-22T23:14:58.685ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
1981 {"msg":"Crucible stats registered with UUID: 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6","v":0,"name":"crucible","level":30test live_repair::repair_test::test_live_repair_deps_repair_other ... ok
1982 ,"time":"2023-09-22T23:14:58.685676398Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
19832023-09-22T23:14:58.685ZINFOcrucible: Crucible 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 has session id: 8a93fbe4-dffc-4ef3-ac6b-8ce0395f2496
19842023-09-22T23:14:58.685ZINFOcrucible: [0] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) New New New ds_transition to WaitActive
1985 {"msg":"[0] Transition from New to WaitActive"test live_repair::repair_test::test_live_repair_deps_repair_rafter ... ,"okv"
1986 :0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.685815273Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
19872023-09-22T23:14:58.685ZINFOcrucible: [0] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) WaitActive New New ds_transition to WaitQuorum
19882023-09-22T23:14:58.685ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
19892023-09-22T23:14:58.685ZINFOcrucible: [0] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) WaitQuorum New New ds_transition to Active
19902023-09-22T23:14:58.685ZINFOcrucible: [0] Transition from WaitQuorum to Active
19912023-09-22T23:14:58.686ZINFOcrucible: [1] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active New New ds_transition to WaitActive
1992 {{""msg":"msg":"[1] Transition from New to WaitActive","v":0,"Crucible stats registered with UUID: 8d04c4f4-46e4-45cb-86ab-55f2298edbab"name":"crucible",,""level"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686072987Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759:30}
1993 {"msg":","Crucible 8d04c4f4-46e4-45cb-86ab-55f2298edbab has session id: 51715fcf-9888-475e-af8d-25850d80ded5"time",":"v":02023-09-22T23:14:58.686106554Z","name",:""hostnamecrucible"":,""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1994 {"msg":","time":"2023-09-22T23:14:58.686137641Z"[1] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active WaitActive New ds_transition to WaitQuorum",",hostname"":v"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1995 ":0{,"name":"crucible",""level":msg"30:"[0] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686190857Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1996 ,"time":"{2023-09-22T23:14:58.686202839Z","hostname"":msg":""ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from WaitActive to WaitQuorum",",pid""v":4759}
1997 :{0,"name":"crucible"","msg"level":"[0] Transition from New to WaitActive","v"{:0,"name":"crucible""msg":","level":30Crucible stats registered with UUID: 7d29f824-b397-4b45-b288-49fd0e9a3aeb","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.68627544Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}
1998 {:30"msg":"[0] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686313667Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
1999 ,"time":"{2023-09-22T23:14:58.686327Z",""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}time
2000 msg":"{[1] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active WaitQuorum New ds_transition to Active"",msg"":"v":0,"name"[0] Transition from WaitActive to WaitQuorum","v":0,"name":""crucible":","level":2023-09-22T23:14:58.686282794Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759:}
2001 "crucible","{level":30"msg":","time":"2023-09-22T23:14:58.686392926Z"Crucible 7d29f824-b397-4b45-b288-49fd0e9a3aeb has session id: 8b5fa56a-1c47-4d69-9152-184aa57abe96",,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2002 {"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686445129Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2003 {"msg":""msg":"[0] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) WaitQuorum New New ds_transition to Active"[0] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) New New New ds_transition to WaitActive",","vv",:""0:time0,"name":"crucible",""level"::"302023-09-22T23:14:58.686407543Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"name",:""pid":crucible"4759,"level"}:
2004 30{,"time"":msg"":"2023-09-22T23:14:58.686492573Z"[1] Transition from WaitQuorum to Active,"",hostname,""v"::0,"name":"crucible",""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2005 ,"time":"2023-09-22T23:14:58.686528424Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2006 {"msg":"[2] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30{,"time":""2023-09-22T23:14:58.686561501Z"msg":,""hostname":"[0] Transition from WaitQuorum to Active"ip-10-150-1-55.us-west-2.compute.internal",,"""timepid""::"v4759"2023-09-22T23:14:58.686505412Z"}
2007 ,:"0hostname"{,"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686610127Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"":4759name":}"
2008 crucible",":{level""":msgip-10-150-1-55.us-west-2.compute.internal""30:,""pid":4759}
2009 [2] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active Active WaitActive ds_transition to WaitQuorum","v":0,"{name":"crucible",""level"msg:":30"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:14:58.686642889Z","time":"2023-09-22T23:14:58.686660712Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2010 {"msg":"[0] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":"4759time":"},
2011 {2023-09-22T23:14:58.686653519Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2012 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30"msg":"[1] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active New New ds_transition to WaitActive","v":,""time"time"::""0,"2023-09-22T23:14:58.686764996Z2023-09-22T23:14:58.686696682Z""name":,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2013 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30"crucible","level":30,"time":"2023-09-22T23:14:58.686813637Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2014 {,"time":""msg":"2023-09-22T23:14:58.686819998Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"hostname[0] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) WaitQuorum New New ds_transition to Active,","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686855351Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2015 "pid":{4759"msg":}"
2016 "{:"[0] Transition from WaitQuorum to Active"ip-10-150-1-55.us-west-2.compute.internal",,"""pidmsg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.686909999Z"",":hostname"4759:"}
2017 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
2018 {""msg":"[1] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30msg":","time":"[2] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active Active WaitQuorum ds_transition to Active"2023-09-22T23:14:58.686970634Z",,""vhostnamev":0,"name":"crucible","level":30",":time"":"2023-09-22T23:14:58.687010852Z"ip-10-150-1-55.us-west-2.compute.internal",",hostname"":pid"":4759ip-10-150-1-55.us-west-2.compute.internal","}pid"
2019 ":0,"name":"crucible","level":30{,"time":""2023-09-22T23:14:58.687062504Z"msg":","hostname":"[1] Transition from WaitActive to WaitQuorumip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2020 ","v":0{,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.687098883Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",""pid"msg:":"4759}[2] Transition from WaitQuorum to Active
2021 :4759}
2022 {"msg":"[1] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30{"msg":""[1] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active WaitQuorum New ds_transition to Active","v,,"""time":":2023-09-22T23:14:58.68716495Z","hostname":"0,"ip-10-150-1-55.us-west-2.compute.internalname"",:""pid":crucible"4759}
2023 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:14:58.687210896Z"level,"":hostname"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2024 ,"time":"2023-09-22T23:14:58.687228846Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2025 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","{level":30"msg":"[1] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.687277073Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2026 v":{0,"name":"",crucible""msg,time""level"::"30":"2023-09-22T23:14:58.68729059Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"[2] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active Active New ds_transition to WaitActive","pid,"":v4759":}0
2027 ,",time":"{"name":""crucible","level":30,"time":"2023-09-22T23:14:58.687352273Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2028 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30msg":","time":[1] Transition from WaitActive to WaitQuorum"","v":2023-09-22T23:14:58.687397977Z0,""name":","crucible"hostname",:""level"2023-09-22T23:14:58.687322368Z:"30ip-10-150-1-55.us-west-2.compute.internal","pid",":hostname4759":"}
2029 ,"time":"{2023-09-22T23:14:58.687429017Z","ip-10-150-1-55.us-west-2.compute.internalhostname"":,"""pid":ip-10-150-1-55.us-west-2.compute.internal4759msg}
2030 {"msg":"6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 is now active with session: 99b1ca7f-770d-44d0-9b20-c9a2a545ed4c","v":0,"name":"crucible","level":30":","time":"2023-09-22T23:14:58.687473631Z","hostname":"[2] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active Active WaitActive ds_transition to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal",,""v"pid:","pid":4759}
2031 {0"msg",:""name":"crucible","level":30[1] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active WaitQuorum New ds_transition to Active"","v"::04759,"name"}:"
2032 crucible","level":30{"msg":"[1] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active Active Active ds_transition to Faulted","v":0,"name":",",crucible""time,""timelevel":":30,"time":"2023-09-22T23:14:58.68759292Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2033 "2023-09-22T23:14:58.687531401Z","hostname{":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid"[1] Transition from Active to Faulted":,"v4759":0,"}name:""
2034 {2023-09-22T23:14:58.687540193Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2035 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":":2023-09-22T23:14:58.687686477Z"","crucible"hostname":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2036 {"msg":,""time":","time":"2023-09-22T23:14:58.687702111Z",2023-09-22T23:14:58.687711314Z[2] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active Active New ds_transition to WaitActive""",,""hostnamehostnamev""::0","":nameip-10-150-1-55.us-west-2.compute.internal"":,"""pidip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2037 {"msg":"[2] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30":4759}
2038 {"msg":"crucible",",time"":level":30,"time":"2023-09-22T23:14:58.687785043Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2039 {"msg"":"2023-09-22T23:14:58.687764788Z"[2] Transition from New to WaitActive","v":,0","hostname"name:":""crucible","level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4759}
2040 ,"time":"2023-09-22T23:14:58.687827849Z"{,"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg,"":pid"[1] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active Faulted Active ds_transition to LiveRepairReady:"4759"},
2041 "[2] Transition from WaitQuorum to Active"v{":,0",""msg"name:"":"v":crucible"0,","level"name"[2] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active Active WaitActive ds_transition to WaitQuorum":,"v":0","cruciblename"":","crucible"level",":level"30::,"time":"2023-09-22T23:14:58.687879233Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2042 30{"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":","time":2023-09-22T23:14:58.687911182Z"","2023-09-22T23:14:58.687921884Z"hostname":","30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4759","pid"},"time":"2023-09-22T23:14:58.687939757Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2043 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible"
2044 {,"level"":msg30"::"4759}
2045 8d04c4f4-46e4-45cb-86ab-55f2298edbab is now active with session: 85964bc3-39c1-482e-87d4-de63e3780fd4","v":{0,"name":",""crucible","level":30,"time":"2023-09-22T23:14:58.688008223Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"time,"pid":4759}
2046 "{:"2023-09-22T23:14:58.687991481Z"msg":"[1] 6c58d01c-f6ba-47d5-9bbd-2d1152f57cb6 (99b1ca7f-770d-44d0-9b20-c9a2a545ed4c) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30msg":"[1] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active Active Active ds_transition to Faulted","time",:""v":02023-09-22T23:14:58.688074605Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2047 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.688119377Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}"
2048 ,"hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:14:58.688127799Z4759"}
2049 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pidmsg":"":4759}
2050 {"msg":[2] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active Active WaitQuorum ds_transition to Active""[1] Transition from Active to Faulted",","vv""::00,","name":name"":"crucible"crucible",,""level"level"::3030,"time":,""time":"2023-09-22T23:14:58.688199662Z","2023-09-22T23:14:58.688200073Z"hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid":}4759
2051 }
2052 test impacted_blocks::test::iblocks_blocks_iterates_over_all_blocks ... ok
2053 {"msg":"[1] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30{,"time":""2023-09-22T23:14:58.688292653Z"msg":,""hostname":"[2] Transition from WaitQuorum to Active"ip-10-150-1-55.us-west-2.compute.internal",",v""pid":4759}
2054 :0{,"name":"crucible"",msg""level:""[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level"::3030,"time":",test live_repair::repair_test::test_live_repair_deps_repair_read ... "oktime":"
2055 2023-09-22T23:14:58.688360051Z"2023-09-22T23:14:58.688359171Z","hostname,":""hostname":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759ip-10-150-1-55.us-west-2.compute.internal}"
2056 ,"pid":4759}{
2057 {"msg""msg":":[1] 8d04c4f4-46e4-45cb-86ab-55f2298edbab (85964bc3-39c1-482e-87d4-de63e3780fd4) Active LiveRepairReady Active ds_transition to LiveRepair"","v":0,"name":"7d29f824-b397-4b45-b288-49fd0e9a3aeb is now active with session: 28f01602-0927-4cde-bb59-43ed57ad0042"crucible",","level"v":30:0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.688451456Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2058 {"msg":","time":[1] Transition from LiveRepairReady to LiveRepair"","v"2023-09-22T23:14:58.688464289Z":0,",name"":"hostname"crucible":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2059 {"msg":","time":"2023-09-22T23:14:58.688507069Z","[1] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active Active Active ds_transition to Faultedhostname"":","v":0ip-10-150-1-55.us-west-2.compute.internal,"","name"pid:"":crucible4759","}level":
2060 30,"time":"2023-09-22T23:14:58.688599327Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2061 {"msg":"[1] Transition from Active to Faulted","v":0,"name":{"crucible","level":30"msg":"Crucible stats registered with UUID: 1c9731f3-b505-4ad4-b45b-f22ac14fff3b","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:14:58.688650795Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2062 {"msg":","time":"[1] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active Faulted Active ds_transition to LiveRepairReady","v2023-09-22T23:14:58.688665809Z"":0,,""hostnamename":"":"crucible","level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4759}
2063 ,"time":"2023-09-22T23:14:58.688700165Z"{,"hostname":""msg":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
2064 Crucible 1c9731f3-b505-4ad4-b45b-f22ac14fff3b has session id: bfa6ab39-1847-4eeb-8aa8-fd97c3e05502","v":{0,"name"":"msg":crucible"","level":30[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.688735587Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:14:58.688741632Z"4759,"hostname}":
2065 "ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759"msg}"
2066 :"{"msg[0] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) New New New ds_transition to WaitActive"":","v":0,"name":"crucible","level":[1] 7d29f824-b397-4b45-b288-49fd0e9a3aeb (28f01602-0927-4cde-bb59-43ed57ad0042) Active LiveRepairReady Active ds_transition to LiveRepair"30test live_repair::repair_test::test_live_repair_deps_repair_repair_repair ... ,"okv
2067 ":0,"name":","crucible"time",:""level":2023-09-22T23:14:58.688792311Z30","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2068 {,"time"":msg"":"2023-09-22T23:14:58.68881173Z"[0] Transition from New to WaitActive,"",hostname""v"::0","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",",level":"30pid":4759}
2069 {"msg":"[1] Transition from LiveRepairReady to LiveRepair",",v"":time":"0,"name"2023-09-22T23:14:58.688847128Z":","crucible"hostname":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2070 {"msg,"":time"":"2023-09-22T23:14:58.688875158Z","hostname":"[0] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) WaitActive New New ds_transition to WaitQuorum","ip-10-150-1-55.us-west-2.compute.internal"v,":"0pid",:"4759name":"}crucible
2071 ","level":30,"time":"2023-09-22T23:14:58.68891197Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
20722023-09-22T23:14:58.688ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
2073 {{"msg"":"msg":"Crucible stats registered with UUID: a5c9346e-78da-47f3-a9ba-6cf4321f87bd"[0] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) WaitQuorum New New ds_transition to Active",",v""v"::00,,""namename""::""crucible"crucible",","level"level:":3030,"time":"2023-09-22T23:14:58.688997304Z","hostname",:""time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.688997818Z,""pid",:"4759hostname":}"
2074 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
2075 {"msg":"Crucible a5c9346e-78da-47f3-a9ba-6cf4321f87bd has session id: 41805997-5765-4deb-a3cd-4db3222bd701","v":0,"name":"crucible","level":30"msg":","time":"[0] Transition from WaitQuorum to Active"2023-09-22T23:14:58.689043544Z","v,"":0hostname",:""name":"crucible",ip-10-150-1-55.us-west-2.compute.internal""level,"":pid30":4759}
2076 {"msg":","time":"2023-09-22T23:14:58.689064136Z","hostname":"[0] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) New New New ds_transition to WaitActive","v":ip-10-150-1-55.us-west-2.compute.internal"0,,""pid"name:":4759"crucible"},
2077 "level":30{"msg":"[1] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active New New ds_transition to WaitActive","v":,"0,time"":name"":"crucible"2023-09-22T23:14:58.689087542Z,"",level"":30hostname":test live_repair::repair_test::test_live_repair_deps_repair_rspan_left ... ,""okip-10-150-1-55.us-west-2.compute.internal"
2078 ,"pid":4759}
2079 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30time":"2023-09-22T23:14:58.689101515Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time":}"
2080 2023-09-22T23:14:58.689138647Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal""msg,"":"pid":4759}
2081 {"msg":"{[0] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) WaitActive New New ds_transition to WaitQuorum",""v":msg"0:,"name":"crucible","level":30[1] Transition from New to WaitActive","v":0,"name":"crucible","level",""Crucible stats registered with UUID: b28e974a-c998-41d2-b8a0-fef8e732ae88"time",:""v":0,"2023-09-22T23:14:58.689186008Z"name":","crucible"hostname",:""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}:
2082 {,"time":""msg":"2023-09-22T23:14:58.689209859Z","hostname[0] Transition from WaitActive to WaitQuorum"":","v":0,ip-10-150-1-55.us-west-2.compute.internal""name,"":"pid":crucible"4759,"level"}:
2083 30{"msg":"Crucible b28e974a-c998-41d2-b8a0-fef8e732ae88 has session id: d887034b-9c0f-4710-a9b0-98af6b46dd84","v"30:0,"name":",crucible""time,"":"level":302023-09-22T23:14:58.689241189Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2084 {,"time":""msg":"2023-09-22T23:14:58.689253953Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,"[0] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) WaitQuorum New New ds_transition to Active,"","v":0,"name":"crucible","level":30pid":4759}
2085 {"msg":"","time":time"":"[0] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) New New New ds_transition to WaitActive2023-09-22T23:14:58.689277765Z"",",hostname""v:"ip-10-150-1-55.us-west-2.compute.internal","pid":":47590,"}
2086 {2023-09-22T23:14:58.689250532Z""msg":","[0] Transition from WaitQuorum to Active"hostname",:""v"name:0","ip-10-150-1-55.us-west-2.compute.internal:name"":,"pid":"4759crucible","}level":
2087 30{""msg":"crucible","level":,30"time":"[1] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active WaitActive New ds_transition to WaitQuorum2023-09-22T23:14:58.689329022Z"",","hostnamev""::0","name":"crucible",ip-10-150-1-55.us-west-2.compute.internal""time,":"2023-09-22T23:14:58.68933997Z""pid,"":4759hostname":"}
2088 {"msg":","level":30[1] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:14:58.689373282Z""time":,""hostname":"2023-09-22T23:14:58.689378534Z","hostname"ip-10-150-1-55.us-west-2.compute.internal:"","pid":4759ip-10-150-1-55.us-west-2.compute.internal",}"
2089 pid":4759}
2090 {"msg":"ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from New to WaitActive",","pidv""::47590,"}name
2091 ":"crucible"{,"level":30"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.689416372Z","{hostname":","timeip-10-150-1-55.us-west-2.compute.internal","msg":""pid":4759[1] Transition from WaitActive to WaitQuorum"}
2092 "{:""msg2023-09-22T23:14:58.68942453Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal,"[1] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active WaitActive New ds_transition to WaitQuorum,"","v":0,"name":"crucible","pid"level:":475930}
2093 {""v"msg"::"0,"time",":name"":"[0] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) WaitActive New New ds_transition to WaitQuorum2023-09-22T23:14:58.689461646Z"crucible,"hostname":"ip-10-150-1-55.us-west-2.compute.internal"","pid",:4759}"
2094 {""msg":"level":30[1] Transition from WaitActive to WaitQuorum","v":,0,""vname":"crucible","level":30{":0,"name":""crucible"msg,",:"""timetime"":":Crucible stats registered with UUID: c72ddd63-a86d-4aaf-9e6d-66c5f8903115"2023-09-22T23:14:58.689502575Z"",",v"":2023-09-22T23:14:58.689509924Z0,"","hostname":","level":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":name4759":"}
2095 ,{"time":"2023-09-22T23:14:58.689542405Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2096 hostname{":""msg":"ip-10-150-1-55.us-west-2.compute.internal","[0] Transition from WaitActive to WaitQuorumpid"":,"4759v":0,"}name
2097 ""msg:"":"crucible","level":30{[1] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active WaitQuorum New ds_transition to Active",""v":msg0","crucible","nametime"",::"crucible"","levellevel":":3030"2023-09-22T23:14:58.689596654Z","hostname":":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}"
2098 time[1] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active WaitQuorum New ds_transition to Active"":"{,"2023-09-22T23:14:58.689623323Z""msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid[0] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) WaitQuorum New New ds_transition to Active"":4759,"v"}:
2099 ,{"msg":"[1] Transition from WaitQuorum to Active","v":v0","name:":"0crucible",",level":"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.689673451Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2100 {"time":""msg":"2023-09-22T23:14:58.68962436Z","time":","hostname":2023-09-22T23:14:58.689678129Z""[2] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active Active New ds_transition to WaitActive",","hostnameip-10-150-1-55.us-west-2.compute.internal"v,"0"pid":,"4759name:":0","crucible"name}"
2101 :"crucible","level":30":",{"level":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":"4759msg":"}
2102 ,","time"time:"Crucible c72ddd63-a86d-4aaf-9e6d-66c5f8903115 has session id: 63f8da4b-43a4-40e6-97e2-4347bb217a59":{,"""v":2023-09-22T23:14:58.689726088Z2023-09-22T23:14:58.689721035Z0",","msg"hostname:""":name":"[1] Transition from WaitQuorum to Activecrucible""",,"vip-10-150-1-55.us-west-2.compute.internal"",""pid":4759}
2103 :0,"name":"crucible","level":30{"msg":"[2] Transition from New to WaitActive","v":0,"name":""crucible","level":,30"hostname":",levelip-10-150-1-55.us-west-2.compute.internal""",":time"30pid":":4759,2023-09-22T23:14:58.689789317Z"}time
2104 "{:"2023-09-22T23:14:58.68980531Z""msg",:""hostname":"[0] Transition from WaitQuorum to Active",ip-10-150-1-55.us-west-2.compute.internal"","hostname":",ip-10-150-1-55.us-west-2.compute.internal"","pid":time"4759:"}
2105 2023-09-22T23:14:58.689816448Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid""msg":":4759}
2106 ,[2] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active Active New ds_transition to WaitActive""pid,""v{"::04759,"name":""crucible"msg",:""level":30[0] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.689875598Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2107 }
2108 ,"time":"2023-09-22T23:14:58.689886372Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2109 "{v":0,"name":""msg":crucible{""msg":"{[2] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active Active WaitActive ds_transition to WaitQuorum","v":"0msg":","name":"[2] Transition from New to WaitActive"crucible",","level":30v":0,"name":"crucible","level":30,"time":"[0] Transition from New to WaitActive"2023-09-22T23:14:58.689948424Z",,""v":hostname":"0,"name"ip-10-150-1-55.us-west-2.compute.internal","time":"2023-09-22T23:14:58.689956941Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2110 {"msg":"[2] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"pid":4759}
2111 ,"time":"2023-09-22T23:14:58.689997855Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2112 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30{"msg":"[2] Transition from WaitActive to WaitQuorum","v":,0","time":"name":"2023-09-22T23:14:58.690024341Z"crucible",,":,""level":30crucible","level":30,"time":""2023-09-22T23:14:58.690049087Z"level":,"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759},"
2113 ,time"":time{":"msg":""hostname"[1] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active New New ds_transition to WaitActive:"","v"ip-10-150-1-55.us-west-2.compute.internal:"0,,""pid":name4759":"}
2114 crucible","level":{30"msg":""2023-09-22T23:14:58.690067915Z",[2] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active Active WaitQuorum ds_transition to Active"",hostname""v",::"0ip-10-150-1-55.us-west-2.compute.internal"","pid":47592023-09-22T23:14:58.690053453Z"}
2115 {"msg":","[2] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active Active WaitQuorum ds_transition to Active"hostname,,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.69015452Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2116 "v":0,"name":"{crucible","level"":msg30":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30",","timetime""::"2023-09-22T23:14:58.690183743Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2117 "{:""msg":"ip-10-150-1-55.us-west-2.compute.internal"[2] Transition from WaitQuorum to Active",","pid"v:":04759,"name":"}crucible""
2118 2023-09-22T23:14:58.690110235Z,""level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal{","pid":4759}"
2119 ,"time":"msg":"2023-09-22T23:14:58.690236963Z"{,"hostname":"time""ip-10-150-1-55.us-west-2.compute.internal:"[0] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) WaitActive New New ds_transition to WaitQuorum""2023-09-22T23:14:58.690189735Z,"v":0,""name":"crucible",","hostname"level"::"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2120 {"msg":"1c9731f3-b505-4ad4-b45b-f22ac14fff3b is now active with session: 51812711-0ce3-446b-bb9f-9ece34c97797","v":0,"name":"crucible","level,":"30pid,":msg,"":time":""2023-09-22T23:14:58.69028777Z","hostname":[1] Transition from New to WaitActive""ip-10-150-1-55.us-west-2.compute.internal",",pid":"4759v":}
2121 04759,"name"}:"
2122 "{"msg":"a5c9346e-78da-47f3-a9ba-6cf4321f87bd is now active with session: be7cf715-24aa-4419-8cc0-e5ddcc58d64d","v":0,"name":"crucible","level":30time":"2023-09-22T23:14:58.690267713Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759crucible[1] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active Active Active ds_transition to Faulted}",,
2123 ""vtime":"2023-09-22T23:14:58.690366257Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2124 ":0,"name":"crucible","level":30",,""{leveltime""":msg30"msg":"[0] Transition from WaitActive to WaitQuorum","v":,0","timename":"":crucible"","2023-09-22T23:14:58.690435121Zlevel""::"[1] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30:"2023-09-22T23:14:58.690418167Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2125 ,"time":"{2023-09-22T23:14:58.690473068Z",""hostname"msg":":"[1] Transition from Active to Faultedip-10-150-1-55.us-west-2.compute.internal30"",,""v"pid:"0:,"4759name":"crucible"},"
2126 ,{""msg":","[1] Transition from Active to Faulted"hostname,"":"v":0,"name"ip-10-150-1-55.us-west-2.compute.internal:"","crucible"pid,":4759}
2127 level":30{"msg":"[1] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active WaitActive New ds_transition to WaitQuorum","v":0,",time":""name":"2023-09-22T23:14:58.690561714Z""level,"":hostname"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2128 "time":","{2023-09-22T23:14:58.690496112Ztime":"""cruciblemsg,":2023-09-22T23:14:58.690583951Z"",hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}
2129 ,"level":30{"msg":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":[0] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) WaitQuorum New New ds_transition to Active4759,"}"
2130 "{"[1] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active Faulted Active ds_transition to LiveRepairReady"msg":","v":0,"name":"crucible","level":30[1] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active Faulted Active ds_transition to LiveRepairReady","v":0,"timename":"":"crucible",,2023-09-22T23:14:58.690623768Z""level":,,"time":""2023-09-22T23:14:58.690693114Z"hostname,"":hostname":""ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"}pid"
2131 :4759}{
2132 "msg":"[1] Transition from Faulted to LiveRepairReady{30"","msgv":":"0,"name":"[1] Transition from WaitActive to WaitQuorumcrucible""",,""level"v"v":::,"0time",:"302023-09-22T23:14:58.690729159Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2133 {"msg":"[1] Transition from Faulted to LiveRepairReady",",v"":time"0:,"""name"name:""2023-09-22T23:14:58.690748971Z"crucible",,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":47590}
2134 ,"name":{"crucible"",msg":""level":30[1] 1c9731f3-b505-4ad4-b45b-f22ac14fff3b (51812711-0ce3-446b-bb9f-9ece34c97797) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30level":30:"crucible","level":30,"time":"2023-09-22T23:14:58.690807432Z",,"",hostname""time":","time":"2023-09-22T23:14:58.690807696Z","2023-09-22T23:14:58.690815795Z"hostname":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal:"4759,"pid"}:
2135 {"msg":"4759:"}
2136 [1] a5c9346e-78da-47f3-a9ba-6cf4321f87bd (be7cf715-24aa-4419-8cc0-e5ddcc58d64d) Active LiveRepairReady Active ds_transition to LiveRepair"{"msg":"time[1] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active WaitQuorum New ds_transition to Active"",:""v":0,"2023-09-22T23:14:58.690800617Zname":""crucible","level":,30"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759},"
2137 ,v"":time"0:","name":"2023-09-22T23:14:58.690875331Z"crucible",,""hostnamelevel""{::"30ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",,""msg"pid"::"4759","pidtime"[0] Transition from WaitQuorum to Active"":","v":2023-09-22T23:14:58.690895389Z"0,,""name":"crucible","}level
2138 ":30{"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30hostname":","timeip-10-150-1-55.us-west-2.compute.internal"":","pid":,"2023-09-22T23:14:58.690920841Z"4759time"},
2139 :{"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name4759":"crucible"},
2140 "level":30{"msg","time"":"hostname"2023-09-22T23:14:58.690992288Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal""pid",:"pid"4759:4759}
2141 {}
2142 {"msg""msg":"[1] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active New New ds_transition to WaitActive","v"::"0,"name":"[1] Transition from WaitQuorum to Active"crucible",",level"":v":300,"name":"crucible","level":30:"Write to Extent 0:2:9 under repair","v":0,"name":"crucible","level":40,,""time":time",""time":":2023-09-22T23:14:58.691069933Z"2023-09-22T23:14:58.691082007Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2143 :"2023-09-22T23:14:58.690929802Z","hostname":"2023-09-22T23:14:58.69107416Z"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}
2144 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2145 {{"msg":"Write 1:0:9 past extent under repair 0","v":0,"name":"crucible","level":40,"""msgtime""::""2023-09-22T23:14:58.691175336Z",","hostnamehostname"[2] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active Active New ds_transition to WaitActive":":"","ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"pid":4759"},"
2146 pid":4759}
2147 {{""msg"msg":":"[1] Transition from New to WaitActive"Write 1:1:9 past extent under repair 0",",v""v"::0,0","name":"crucible","namelevel"":":crucible"30,"level":40v":0,"name":"crucible",","level"time","time":"2023-09-22T23:14:58.691245693Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2148 :"2023-09-22T23:14:58.691246774Z","hostname"{:":ip-10-150-1-55.us-west-2.compute.internal","pid"30":4759msg":}"
2149 {[1] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active WaitActive New ds_transition to WaitQuorum"","msg"v":":0IO Write 1008 on eur 0 Added deps 1",,""v"name":":0crucible,""name,,""level:"":crucible","level":40,"time":"2023-09-22T23:14:58.691321989Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2150 30"time":"2023-09-22T23:14:58.691294406Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2151 ,"time":"2023-09-22T23:14:58.691352592Z"{,"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg":","pid":4759[2] Transition from New to WaitActive","}v
2152 ":0,"name":"crucible","{level":30"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30test live_repair::repair_test::test_live_repair_deps_repair_rspan_right ... ,ok"
2153 time":"2023-09-22T23:14:58.691399517Z",","hostnametime":":""2023-09-22T23:14:58.69141105Z"ip-10-150-1-55.us-west-2.compute.internal",,""hostname"pid":":4759ip-10-150-1-55.us-west-2.compute.internal}"
2154 ,"pid":4759}
2155 {"{msg":""msg":"[2] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active Active WaitActive ds_transition to WaitQuorum","v":0,[1] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active WaitQuorum New ds_transition to Active""name":,""v"crucible":,"0level":,"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.691473716Z",,""hostname"time":":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.69147679Z"",",pid"":hostname4759":"}
2156 ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
2157 "msg":"[2] Transition from WaitActive to WaitQuorum"{,"v":0,"name"":"msg":crucible"","level":30[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.691530115Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time"}:
2158 "2023-09-22T23:14:58.691536694Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2159 [2] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active Active WaitQuorum ds_transition to Active","v":0,"{name":"crucible","level":"30msg":"[2] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active Active New ds_transition to WaitActive","test live_repair::repair_test::test_live_repair_deps_repair_wafter ... v",ok:
2160 "0time":",2023-09-22T23:14:58.691581094Z"","namehostname":":""crucible"ip-10-150-1-55.us-west-2.compute.internal",,""pid":level":475930}
2161 {"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,,""time":"time":"2023-09-22T23:14:58.691628255Z","hostname":"2023-09-22T23:14:58.691617832Z"ip-10-150-1-55.us-west-2.compute.internal",,""pidhostname""::"4759}
2162 ip-10-150-1-55.us-west-2.compute.internal","{pid":"4759msg":"}
2163 b28e974a-c998-41d2-b8a0-fef8e732ae88 is now active with session: 8884c3fe-403a-4534-b3a2-b199a5a05d84","v":0,"name{":"crucible","level":"30msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.691665948Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2164 ,"{time":""2023-09-22T23:14:58.691674412Z"msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active Active Active ds_transition to Faulted",}"
2165 v":0,"name":"crucible"{,"level":30"msg":"[2] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.69171284Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2166 ,"time":"{2023-09-22T23:14:58.691726256Z",""hostnamemsg":"":"[1] Transition from Active to Faulted"ip-10-150-1-55.us-west-2.compute.internal,"","v"pid"::04759,"name":}"
2167 crucible","level":30{"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.691767922Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2168 ,"time":"2023-09-22T23:14:58.691779927Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2169 [1] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active Faulted Active ds_transition to LiveRepairReady","v":0,"{name":"crucible",""levelmsg":"":30[2] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible"{,"level":30"msg,""time":":"2023-09-22T23:14:58.691826985Z","hostname":"Crucible stats registered with UUID: 4755c4b6-38b2-4fc5-b920-a3b417f63d0d",ip-10-150-1-55.us-west-2.compute.internal"",v"","pid"::04759time,""name:""}:"
2170 2023-09-22T23:14:58.691837888Z"crucible",,""{hostname":"level""msgip-10-150-1-55.us-west-2.compute.internal""::"30,"pid":4759[1] Transition from Faulted to LiveRepairReady",}"
2171 v":0,"name":"crucible",{"level":30"msg":"[2] Transition from WaitQuorum to Active","v":0,",name"":time":""crucible"2023-09-22T23:14:58.691880887Z,"",time":"","2023-09-22T23:14:58.691895244Z"hostname",:""level"hostname":ip-10-150-1-55.us-west-2.compute.internal"":,30ip-10-150-1-55.us-west-2.compute.internal"",pid"":pid"4759:4759}}
2172 
2173 {,"time":"{"msg2023-09-22T23:14:58.691922917Z":"""msg",:""hostname":"Crucible 4755c4b6-38b2-4fc5-b920-a3b417f63d0d has session id: 89f34d5a-f22c-4d68-bcdd-5b31cf1d61b8","ip-10-150-1-55.us-west-2.compute.internal"v,""[1] b28e974a-c998-41d2-b8a0-fef8e732ae88 (8884c3fe-403a-4534-b3a2-b199a5a05d84) Active LiveRepairReady Active ds_transition to LiveRepair":,0pid":"4759,v"}:
2174 0","name"name":":{"crucible"crucible,""level",":"30level"msg"::"30c72ddd63-a86d-4aaf-9e6d-66c5f8903115 is now active with session: d9db3e91-27d7-4cb9-af2b-1cf3b9294be1","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:14:58.69197879Z"","time":"hostname":"2023-09-22T23:14:58.691982245Z"ip-10-150-1-55.us-west-2.compute.internal,"","hostname,pid"":time4759":"}:"
2175 "2023-09-22T23:14:58.691992116Z","ip-10-150-1-55.us-west-2.compute.internal"hostname,"{pid""::4759""msg"}:
2176 "ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] Transition from LiveRepairReady to LiveRepair"{},
2177 ""v"msg:":0{","name":"crucible"",msg":""[0] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) New New New ds_transition to WaitActive"level":,"30v":0[1] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active Active Active ds_transition to Faulted",",name":""cruciblev"":,0",level"":name":30"crucible",","time"level"::"302023-09-22T23:14:58.692060952Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2178 ,"time":"{2023-09-22T23:14:58.692074326Z","",hostname":""msg"time":ip-10-150-1-55.us-west-2.compute.internal"":,"2023-09-22T23:14:58.69208152Zpid""":,"4759Write to Extent 0:2:9 under repair"hostname},"
2179 ":v":"0,"{name"ip-10-150-1-55.us-west-2.compute.internal":,"""pid"msg"::crucible4759"",}"
2180 level"[0] Transition from New to WaitActive":40,"v"{:0,"name":""crucible"msg",:""level":30[1] Transition from Active to Faulted","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:14:58.69214181Z"{,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","",pid"msg"::4759"}
2181 Crucible stats registered with UUID: 3bb7876f-b8c5-49a9-9144-66268618f386","time,":""v":2023-09-22T23:14:58.692158374Z"0,,""hostname":name":""crucibletime":"2023-09-22T23:14:58.692150972Z","hostname":"","levelip-10-150-1-55.us-west-2.compute.internal"":,"30pid":4759}
2182 {""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[0] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) WaitActive New New ds_transition to WaitQuorum"},,""v"time:":0","name":2023-09-22T23:14:58.692196077Z""crucible",","hostname"level:"":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2183 ,"time":"2023-09-22T23:14:58.692224723Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2184 Crucible 3bb7876f-b8c5-49a9-9144-66268618f386 has session id: 80597233-0197-410e-81fc-216b4e403404{"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.692264048Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",""pid":4759,"v"}:
2185 0,"name":"crucible{","level":"30msg":"
2186 [0] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible",",time"{level""::"302023-09-22T23:14:58.692291638Z",""hostname":msg"","time":"2023-09-22T23:14:58.692306364Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2187 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2188 {,"time":""msg":"2023-09-22T23:14:58.692337929Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","[0] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) New New New ds_transition to WaitActivepid"":,4759"v":}
2189 0:,{""name"":msg"":"crucible","level":30[1] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active Faulted Active ds_transition to LiveRepairReady","v":0[1] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active New New ds_transition to WaitActive",",name"":v":"0crucible,",,"level"":30time":"2023-09-22T23:14:58.692364834Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2190 {"name":""msgcrucible"":",,""[0] Transition from New to WaitActivetimelevel""",:"2023-09-22T23:14:58.692380017Z"test live_repair::repair_test::test_live_repair_deps_repair_write ... ,"okhostname
2191 ":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2192 {"v"":msg":"0,"name":"[1] Transition from Faulted to LiveRepairReady"crucible",",v":"0level",":name":"30crucible","level":30,"time":"2023-09-22T23:14:58.692443999Z","hostname":,":"time30":"ip-10-150-1-55.us-west-2.compute.internal",2023-09-22T23:14:58.69244648Z"","pid"hostname":,"time":"2023-09-22T23:14:58.692464536Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2193 {"msg":"4759[1] Transition from New to WaitActive"}
2194 {"msg":"[0] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) WaitActive New New ds_transition to WaitQuorum",,""v":v"0:,"0name":,""namecrucible"":",:crucible""ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2195 {"msg":"[1] c72ddd63-a86d-4aaf-9e6d-66c5f8903115 (d9db3e91-27d7-4cb9-af2b-1cf3b9294be1) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30level":30,"time":"2023-09-22T23:14:58.69253704Z"","hostname":","level,""time:":"2023-09-22T23:14:58.692543734Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2196 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":3030,"time":"2023-09-22T23:14:58.69258263Z","hostname":","time":ip-10-150-1-55.us-west-2.compute.internal"","pid":47592023-09-22T23:14:58.692588Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2197 {"msg":"[1] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active WaitActive New ds_transition to WaitQuorum","v":0,}"
2198 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2199 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":{30"msg":"[0] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) WaitQuorum New New ds_transition to Active","v":0,"name":"namecrucible"",:""level":crucible"30,","time"level:"":302023-09-22T23:14:58.692658656Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2200 ,"time,"":"time":"2023-09-22T23:14:58.692671444Z"2023-09-22T23:14:58.692675683Z,"",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid",:"4759pid":4759}
2201 {"msg":"[0] Transition from WaitQuorum to Active","v":0},"name
2202 ":"crucible","level"{:30"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.692714509Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2203 {"msg",:""time":"2023-09-22T23:14:58.692724301Z","hostname":"[1] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active New New ds_transition to WaitActive","v"ip-10-150-1-55.us-west-2.compute.internal:"0,","pid":name":4759"crucible"}
2204 ,"level":30{"msg":"[1] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible",","level"time:":30"2023-09-22T23:14:58.692762904Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2205 ,"time":"{2023-09-22T23:14:58.692775806Z"",msg"":"hostname":"[1] Transition from New to WaitActive",ip-10-150-1-55.us-west-2.compute.internal""v,"":pid0":,"4759name":}"
2206 crucible","level"{:30"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.692798433Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"time}"
2207 {"msg":"[1] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active WaitActive New ds_transition to WaitQuorum":,"v"":0,"name"2023-09-22T23:14:58.692806853Z:""crucible",,""levelhostname""::"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2208 {{,""time"msg:"":"2023-09-22T23:14:58.692835738Z",""hostnamemsg"":"[2] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active Active New ds_transition to WaitActive:"","v":0,ip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
2209 name{":""crucible"msg":","level":30[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30Crucible stats registered with UUID: af9c4a58-0d51-4892-b110-0ff415b3afef","v":0,"name":"crucible",",time",time""level":30:"2023-09-22T23:14:58.692879492Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2210 {,""time"msg:"":"test live_repair::repair_test::test_live_repair_deps_repair_wspan_left ... 2023-09-22T23:14:58.692893017Z",ok"hostname
2211 ":"[1] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active WaitQuorum New ds_transition to Activeip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2212 {"msg":"Crucible af9c4a58-0d51-4892-b110-0ff415b3afef has session id: 5d96c4af-b83f-49c3-b296-a00e3993f867"","v":0,,""name"v:"":crucible0",","namelevel":"30:"crucible","level":30":"2023-09-22T23:14:58.692874543Z",","time":hostname":"","timeip-10-150-1-55.us-west-2.compute.internal"",:"2023-09-22T23:14:58.692968105Z","hostname":""pid":4759ip-10-150-1-55.us-west-2.compute.internal","}pid
2213 ":4759}
2214 {2023-09-22T23:14:58.69296285Z{"""msg"msg:"":","hostname[2] Transition from New to WaitActive[1] Transition from WaitQuorum to Active""",":"vip-10-150-1-55.us-west-2.compute.internal",""pid"::47590,"name}"
2215 :"crucible","level":{30",msg"":"v":0,"name":"crucible","level"[0] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) New New New ds_transition to WaitActive",,"v":0,""name"time":"2023-09-22T23:14:58.69302446Z":,"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2216 :"{crucible","",msg""level:"":30[2] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30time":"2023-09-22T23:14:58.693047994Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759},
2217 ","time":"time":{2023-09-22T23:14:58.693074672Z"",""msghostname""2023-09-22T23:14:58.693069119Z":,"hostname":""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid"::4759}4759
2218 }
2219 {{""msg"msg":":"[2] Transition from New to WaitActive","v"[2] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active Active WaitActive ds_transition to WaitQuorum:"0,",name"":v"":0crucible",","name":level"":crucible30","level":30:"[0] Transition from New to WaitActive","v":0,",",name"time"::""crucible","2023-09-22T23:14:58.69313506Zlevel":"30,"hostname":""timeip-10-150-1-55.us-west-2.compute.internal"":,""pid":47592023-09-22T23:14:58.693136881Z"}
2220 ,"time":"{2023-09-22T23:14:58.693157683Z"","msg":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[2] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active Active WaitActive ds_transition to WaitQuorum}"
2221 ,"v":0,"name":,"{crucible"msg":"","level":30[0] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.693202225Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2222 {""msg"hostname":":"[2] Transition from WaitActive to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal","v,"":pid"0:,"4759name":"}
2223 crucible{"",msg"":level"":30[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.693211625Z",","hostname"time"::,"time"":"2023-09-22T23:14:58.69325047Z"2023-09-22T23:14:58.693257263Z",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
2224 
2225 {"msg":""ip-10-150-1-55.us-west-2.compute.internal"[2] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active Active WaitQuorum ds_transition to Active,{"",pid"""msgv":"":0,"name":"crucible","level":30[2] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active Active WaitQuorum ds_transition to Active","v":0,"name"::"crucible"4759,"level":}30
2226 ,"time":"2023-09-22T23:14:58.693307412Z","{hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal"",msg"":pid":4759}
2227 {"msg":"2023-09-22T23:14:58.693315903Z"[2] Transition from WaitQuorum to Active",,""v":hostname"0:","name":""ip-10-150-1-55.us-west-2.compute.internalcrucible"",","pid":level":475930[0] Transition from WaitActive to WaitQuorum"}
2228 ,"v":0,{"name":""crucible"msg":,","level":30time":"2023-09-22T23:14:58.693350915Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2229 {{"msg":","time":"""2023-09-22T23:14:58.69336383Z"3bb7876f-b8c5-49a9-9144-66268618f386 is now active with session: 910334ff-5f0d-4dec-a302-b396ca5c2b47"[2] Transition from WaitQuorum to Activemsg",","":"vv""::00Crucible stats registered with UUID: 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed,",""v"name:":"0crucible",",,"name""levelhostname":":crucible"30,"level":30,"time":"2023-09-22T23:14:58.693413505Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2230 {"msg":"[1] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active Active Active ds_transition to Faulted","v",:"0time"":":,""2023-09-22T23:14:58.693416317Zname""ip-10-150-1-55.us-west-2.compute.internal":,"pid":4759},"
2231 crucible","level"{:30,""msg":"hostname":""name":ip-10-150-1-55.us-west-2.compute.internal""crucible","[0] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) WaitQuorum New New ds_transition to Active",,"",time"":level":"30v"2023-09-22T23:14:58.693465122Z:"0,","hostname"name":pid":"4759,ip-10-150-1-55.us-west-2.compute.internal"",time"":}pid
2232 ":"crucible","level":{30"msg":"Crucible 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed has session id: 55205457-cbd1-4d6f-81eb-71ee3333eada","v":0,"name":"crucible","level":30:4759,"}time"
2233 "{2023-09-22T23:14:58.693483309Z""msg,"":"hostname":"[1] Transition from Active to Faulted",:""ip-10-150-1-55.us-west-2.compute.internal,"2023-09-22T23:14:58.693509786Z"v":,0time","hostname":""name":"crucible"ip-10-150-1-55.us-west-2.compute.internal,"level":30"",",pid"":pid":47594759}}
2234 
2235 ,{"{Job is DownstairsIO { ds_id: JobId(1002), guest_id: 1, work: ExtentFlushClose { dependencies: [JobId(1000), JobId(1001)], extent: 1, flush_number: 2, gen_number: 4, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2236 :time"2023-09-22T23:14:58.693524992Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg"}:
2237 "{""msg"msg":":"[0] Transition from WaitQuorum to Active","v":0,"[0] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) New New New ds_transition to WaitActivename":""crucible","level":,"30v"::0","name":"2023-09-22T23:14:58.69355967Zcrucible"","level":,30"hostname":""ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2238 4755c4b6-38b2-4fc5-b920-a3b417f63d0d is now active with session: 29aa10dc-2c76-4f2c-8851-5916dfa1f48d","time":","{v":2023-09-22T23:14:58.693705021Z"0","msg,"":name""hostname":",":"time"[1] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active Faulted Active ds_transition to LiveRepairReadycrucible"",ip-10-150-1-55.us-west-2.compute.internal":","",2023-09-22T23:14:58.693713361Zpid"v"level:"0"::,"304759name"}":
2239 ","crucible"hostname","{level":":"30msgip-10-150-1-55.us-west-2.compute.internal"":",","timepid":"[1] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active New New ds_transition to WaitActive":,"4759",2023-09-22T23:14:58.69375028Z""time}v":
2240 0,"":"hostname","2023-09-22T23:14:58.693765185Z:"",{"namehostnameip-10-150-1-55.us-west-2.compute.internal"":,""""pid"msg:":ip-10-150-1-55.us-west-2.compute.internal"4759,"}pid
2241 "crucible":""{:,""level":msg4759":"}30
2242 [0] Transition from New to WaitActive",[1] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active Active Active ds_transition to Faulted{"",""v",":timevmsg""::0","":name[1] Transition from Faulted to LiveRepairReady"":",crucible""v,"":"0level,"":name30"2023-09-22T23:14:58.693818056Z"0:","cruciblehostname","":name":,,"""timelevel"":"ip-10-150-1-55.us-west-2.compute.internal"":,"crucible"2023-09-22T23:14:58.693844996Z30,"pid,""level"hostname":"4759:,:""time}":ip-10-150-1-55.us-west-2.compute.internal""
2243 2023-09-22T23:14:58.693863963Z,""{30pid,"":"4759hostname"}:
2244 "msg":"{[1] Transition from New to WaitActive"ip-10-150-1-55.us-west-2.compute.internal,""",v":msg""pid:"":04759,"[1] Transition from Active to Faulted}"
2245 ,name":{,"""msgv"":"time":0","crucible"name":",":cruciblelevel":30""[1] 3bb7876f-b8c5-49a9-9144-66268618f386 (910334ff-5f0d-4dec-a302-b396ca5c2b47) Active LiveRepairReady Active ds_transition to LiveRepair,"",level""v:"30:2023-09-22T23:14:58.693884855Z"0,",,"name"time":"":"2023-09-22T23:14:58.693921071Z"hostname,"crucible,":""hostname",""timelevel""::"30ip-10-150-1-55.us-west-2.compute.internal":"2023-09-22T23:14:58.693925442Z",,""ip-10-150-1-55.us-west-2.compute.internal"pid","hostname,"pid"::""time"::47594759"ip-10-150-1-55.us-west-2.compute.internal"}2023-09-22T23:14:58.693946584Z,"",pid"":
2246 4759hostname"}:
2247 "}{
2248 {ip-10-150-1-55.us-west-2.compute.internal",{"""pid"":msg4759msg":"msg"}"
2249 :"[1] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active WaitActive New ds_transition to WaitQuorum"{,":v":0[1] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active Faulted Active ds_transition to LiveRepairReady,"",msg"""":v""name":":[1] Transition from LiveRepairReady to LiveRepair0",",name""v:"":crucible0crucible,"","name"":level""[0] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) WaitActive New New ds_transition to WaitQuorum","crucible:"30,level":"30,level":"30v":0,"name":",","crucible"time",time,"":"timelevel"""::"2023-09-22T23:14:58.694022685Z""2023-09-22T23:14:58.694025353Z",2023-09-22T23:14:58.694028441Z"":,hostname"",hostname:"":"30"hostnameip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"",pid""":":pid"4759:4759ip-10-150-1-55.us-west-2.compute.internal"}}
2250 
2251 ,"pid":4759}{
2252 "msg":"{"msg":",[1] Transition from Faulted to LiveRepairReady"[1] Transition from WaitActive to WaitQuorum","","time":"v":v"2023-09-22T23:14:58.694054778Z"0:,,"0hostname":",""name"ip-10-150-1-55.us-west-2.compute.internal"name:"",crucible:""crucible""pid",":,"level":4759level":3030}
2253 ,"time":"2023-09-22T23:14:58.694105568Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759}
2254 ",msg"":time{"":""msg":"[0] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.694106381Z","v":,0"[1] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active WaitQuorum New ds_transition to Activehostname"":,""name":",crucible"ip-10-150-1-55.us-west-2.compute.internal""v,,""level":"0pid",":name4759"::}30"
2255 crucible","level":30,"time":"2023-09-22T23:14:58.694159252Z","hostname":","ip-10-150-1-55.us-west-2.compute.internaltime":"","pid":2023-09-22T23:14:58.694169329Z"4759,"hostname"}:
2256 "ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
2257 "{msg":"{"msg":""msg[0] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) WaitQuorum New New ds_transition to Active"":","v":0[1] Transition from WaitQuorum to Active","[1] 4755c4b6-38b2-4fc5-b920-a3b417f63d0d (29aa10dc-2c76-4f2c-8851-5916dfa1f48d) Active LiveRepairReady Active ds_transition to LiveRepair"name",,"":"vv":":00,"crucible"name,":,""cruciblelevel":"30","name"level:"":crucible"30,"level":30,"time":","2023-09-22T23:14:58.694239838Z"time":",",2023-09-22T23:14:58.694244594Z""hostname",time"::"""hostname":"2023-09-22T23:14:58.6942469Z"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal"",,""pid"hostname"::4759pid"":}4759
2258 }ip-10-150-1-55.us-west-2.compute.internal"
2259 ,{"pid":{4759""}msgmsg"
2260 :"":"[0] Transition from WaitQuorum to Active"{,"v"[1] Transition from LiveRepairReady to LiveRepair""msg":,":0"v":,"0name,":"test live_repair::repair_test::test_live_repair_deps_super_spanner ... name"ok:"
2261 crucible"[2] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active Active New ds_transition to WaitActive"",",cruciblev":"0level,"""name,""level":::3030"crucible","level":30,,""time":time":""2023-09-22T23:14:58.694355231Z",,"2023-09-22T23:14:58.694352756Z""hostname,time":"":""2023-09-22T23:14:58.694353451Z"hostname":ip-10-150-1-55.us-west-2.compute.internal",""hostname",:ip-10-150-1-55.us-west-2.compute.internal""",pid"ip-10-150-1-55.us-west-2.compute.internal""pid",:":4759pid"4759}:}4759
2262 }
2263 
2264 {"msg":{"[2] Transition from New to WaitActive",""msg"v":":0,"name":"crucible","level":30[1] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.694430191Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"time":"}
2265 2023-09-22T23:14:58.694437295Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal"",msg"":"pid":4759}
2266 [2] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"{crucible","level":30"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.694481957Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2267 ,"time":"{2023-09-22T23:14:58.694490924Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","[2] Transition from WaitActive to WaitQuorum"pid",:"4759v":0},
2268 "name":"crucible","level":{30"msg":"[1] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level,"":time"30:"2023-09-22T23:14:58.694532488Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2269 {,""timemsg":"":"2023-09-22T23:14:58.694548786Z","hostname":"[2] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active Active WaitQuorum ds_transition to Active"ip-10-150-1-55.us-west-2.compute.internal",,""pidv""::47590,"name":"}crucible
2270 ","level":30{"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.69458955Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2271 ,"{time":""2023-09-22T23:14:58.694602911Z"msg":","hostname":"[2] Transition from WaitQuorum to Active","v":ip-10-150-1-55.us-west-2.compute.internal"0,",pid""name":":crucible4759","level"}:
2272 30test live_repair::repair_test::test_live_repair_deps_repair_wspan_right ... {ok
2273 ","msg":time"":"2023-09-22T23:14:58.694640919Z","hostname":"[1] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active WaitQuorum New ds_transition to Active",ip-10-150-1-55.us-west-2.compute.internal"","v"pid:":04759,"name":"}
2274 crucible","level":{30"msg":"af9c4a58-0d51-4892-b110-0ff415b3afef is now active with session: 90c473a8-fc29-4ea8-a43c-81300d27fad5","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.694682492Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":,""pid":2023-09-22T23:14:58.694693995Z"4759,"hostname":"}
2275 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2276 {{"msg":""msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible"[1] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active Active Active ds_transition to Faulted",",level"":v30":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.694741088Z",","time"hostname":":"2023-09-22T23:14:58.694745545Zip-10-150-1-55.us-west-2.compute.internal"",","pid":hostname4759":"}
2277 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}{
2278 "msg":"{"msg"[2] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active Active New ds_transition to WaitActive":","v":[1] Transition from Active to Faulted0",",name":""crucible"v,"":level"0:,"30name":"crucible","level":30{,"time":",""2023-09-22T23:14:58.694805036Zmsg"time":":""2023-09-22T23:14:58.694808073Z",",hostname":""Crucible stats registered with UUID: c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab"hostname":"ip-10-150-1-55.us-west-2.compute.internal",,ip-10-150-1-55.us-west-2.compute.internal"","pid""pid"::47594759v":}}
2279 0
2280 {,"name":"crucible"{,""level"msg""msg":::30""[2] Transition from New to WaitActive","v":0,"name":"[1] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active Faulted Active ds_transition to LiveRepairReady"crucible",,""level"v:":300,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.694867888Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759",time":""}time
2281 ":2023-09-22T23:14:58.694880771Z"","{hostname"2023-09-22T23:14:58.694885375Z":"",msg"":"ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pidCrucible c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab has session id: 62b5a2a7-dfe7-4c3b-a240-ba1a85e4e77f""ip-10-150-1-55.us-west-2.compute.internal,"":,v4759"":pid"0}:,4759"
2282 }name"
2283 :"crucible","level"{:30"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.694950615Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",":time4759":"}
2284 2023-09-22T23:14:58.69496082Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"msg":}"
2285 [0] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) New New New ds_transition to WaitActive","v":{0,"name":"crucible""msg":","Job is DownstairsIO { ds_id: JobId(1002), guest_id: 1, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001)], extent: 1 }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2286 {[1] af9c4a58-0d51-4892-b110-0ff415b3afef (90c473a8-fc29-4ea8-a43c-81300d27fad5) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible"","msglevel""::"30Crucible stats registered with UUID: 82297280-9384-4cb6-a5ef-2a47addce4c7","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:14:58.695073169Z","hostname":[2] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active Active WaitActive ds_transition to WaitQuorum","v":"0,,"ip-10-150-1-55.us-west-2.compute.internaltime":"2023-09-22T23:14:58.695083195Z","level"hostname","::30"ip-10-150-1-55.us-west-2.compute.internal"","pid"name":":"4759pid"crucible}:"4759
2287 {,"time":"2023-09-22T23:14:58.695113289Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2288 Job is DownstairsIO { ds_id: JobId(1003), guest_id: 1, work: ExtentLiveNoOp { dependencies: [JobId(1001), JobId(1002)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2289 "msg,":"Crucible 82297280-9384-4cb6-a5ef-2a47addce4c7 has session id: aad1ab20-eb49-4087-bb03-986281672803","v":0},"name"
2290 :"crucible","level":30{"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30"level":30,"time":"2023-09-22T23:14:58.695315537Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2291 ,"time":"2023-09-22T23:14:58.695325255Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg,"}
2292 time":"2023-09-22T23:14:58.695330627Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2293 ":"{{[0] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) New New New ds_transition to WaitActive"",msg":""v":"0[2] Transition from WaitActive to WaitQuorummsg","":,"name":""[0] Transition from New to WaitActive"v"crucible,"":,"0level"v":,:"30name":"0crucible",","name"level":":crucible"30,"level":,"30time":"2023-09-22T23:14:58.695404181Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2294 {,"time":"","msg":time":""2023-09-22T23:14:58.695412649Z"[0] Transition from New to WaitActive",",hostname"":"v"2023-09-22T23:14:58.695415897Z":ip-10-150-1-55.us-west-2.compute.internal,"","hostname0","pid":":name":"4759crucible"ip-10-150-1-55.us-west-2.compute.internal,""}level":
2295 ,30"pid":4759}{
2296 ","msgtime":"":{2023-09-22T23:14:58.695467951Z"","hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":,""[2] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active Active WaitQuorum ds_transition to Active"pid":4759,"}v
2297 "[0] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) WaitActive New New ds_transition to WaitQuorum":{0,",""v"name":"msg":"crucible":,"0level,""[0] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) WaitActive New New ds_transition to WaitQuorum":name",:"30crucible""v":,0,""name":"level":crucible"30,"level":30,"time":"2023-09-22T23:14:58.695521901Z","hostname":","ip-10-150-1-55.us-west-2.compute.internaltime"":","pid":47592023-09-22T23:14:58.695514151Z",",time"}"
2298 :"hostname":"{2023-09-22T23:14:58.695521314Z"ip-10-150-1-55.us-west-2.compute.internal",""hostname",msg":"":pid"[0] Transition from WaitActive to WaitQuorum"",:"4759ip-10-150-1-55.us-west-2.compute.internal"v"},
2299 ":pid"0:,4759"{name"}:"
2300 crucible"","msg"level"{::30""msg":"[2] Transition from WaitQuorum to Active","v":[0] Transition from WaitActive to WaitQuorum0",",name":""cruciblev",:"0",",name":"time":""level"crucible2023-09-22T23:14:58.695583376Z:""30,test live_repair::repair_test::test_live_repair_deps_writes ... ,"okhostname"
2301 :""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2302 ,"time":"{2023-09-22T23:14:58.695609357Z",""hostname"msg,":":""time":"2023-09-22T23:14:58.695623867Zip-10-150-1-55.us-west-2.compute.internal"","[0] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) WaitQuorum New New ds_transition to Activepid",",""hostname":"v"::ip-10-150-1-55.us-west-2.compute.internal"04759,","name}":pid":
2303 "4759crucible"},
2304 {"level":30{"msg":""msg":"6d860aa7-cf0b-47de-b0a7-b9d3a68235ed is now active with session: 758265c4-df86-4cfd-92d1-c4c7741a69e6","v":[0] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) WaitQuorum New New ds_transition to Active"0,,""name":v"",crucible":,""0time,level":"""name"2023-09-22T23:14:58.6956737Z:":"30crucible",","hostname"level:"":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2305 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":",crucible"",,time"""timelevel":":":30"2023-09-22T23:14:58.6957073Z",2023-09-22T23:14:58.695702253Z""hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal,""time",ip-10-150-1-55.us-west-2.compute.internal:"""2023-09-22T23:14:58.695729676Z",,""pidpid":"hostname:4759":"4759}ip-10-150-1-55.us-west-2.compute.internal"}
2306 {
2307 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30"msg":"[1] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active Active Active ds_transition to Faulted,"time":"2023-09-22T23:14:58.695829594Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2308 {""msg":","v":0,"name":"crucible",[1] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active New New ds_transition to WaitActive"","levelv":"0:,"30name":"crucible","level":30,"time":","2023-09-22T23:14:58.695880801Z"time":",",hostname""2023-09-22T23:14:58.695877633Z"pid"::,4759""}hostname
2309 ":"ip-10-150-1-55.us-west-2.compute.internal","pid":{4759ip-10-150-1-55.us-west-2.compute.internal"},
2310 ""pid":msg{4759":""}msg"
2311 [1] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active New New ds_transition to WaitActive":","v":0[1] Transition from New to WaitActive,"",name":""{v":crucible"0,""level"msg:,30"":"name":"crucible"[1] Transition from Active to Faulted",",level":"30v":0,"name":","crucible"time",:""level"2023-09-22T23:14:58.695950915Z":,"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2312 ,"time":"{"2023-09-22T23:14:58.695959448Z"msg":","hostname":"[1] Transition from New to WaitActive","v":0,"ip-10-150-1-55.us-west-2.compute.internal"name",:""crucible"pid",",":level":30time"4759:"}
2313 2023-09-22T23:14:58.695969761Z","time":","hostname2023-09-22T23:14:58.695992885Z"{,""hostname":""msg":ip-10-150-1-55.us-west-2.compute.internal":",""pid":4759}
2314 [1] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active WaitActive New ds_transition to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal"{,","vmsg":"":"0pid",:"4759name":"[1] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active WaitActive New ds_transition to WaitQuorumcrucible"}"
2315 ,{",v":"0"level"msg,"{::"30name"":""crucible"msg":Crucible stats registered with UUID: 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6,""",level""v:":300,"name":"crucible","level":[1] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active Faulted Active ds_transition to LiveRepairReady30","v":0,"name":"crucible",,",""time"time":"level":2023-09-22T23:14:58.696055584Z"",","hostname"2023-09-22T23:14:58.696048676Z":time",":ip-10-150-1-55.us-west-2.compute.internal:"","302023-09-22T23:14:58.696060829Zpid"":,4759""hostname}"
2316 :"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal"msg""::4759","}pid
2317 [1] Transition from WaitActive to WaitQuorum"",:{,"""vmsg""::4759"0time"},
2318 :"Crucible 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 has session id: e9bc9f9b-c734-4b03-90fb-6c8b80f7d91c""name",:"2023-09-22T23:14:58.696086447Z"{v"":,""0crucible,""msgname"",:""level"crucible:"30,":level":"30hostname":"[1] Transition from WaitActive to WaitQuorum","v"ip-10-150-1-55.us-west-2.compute.internal":,0","pid"name",,"":timetime""::"":crucible"2023-09-22T23:14:58.696137453Z""4759,2023-09-22T23:14:58.696141991Z"},
2319 ""level":,30"hostname"{:"ip-10-150-1-55.us-west-2.compute.internal",""pid":msg4759":"}
2320 [1] Transition from Faulted to LiveRepairReady","v"{,":time"0:,""hostnamemsg""::""name":""ip-10-150-1-55.us-west-2.compute.internal"crucible,2023-09-22T23:14:58.696171883Z""[0] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) New New New ds_transition to WaitActive""pid",:,,"4759"}v
2321 "level":"{:hostname""0msg,"":"name"::""30crucible"[1] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active WaitQuorum New ds_transition to Active",",level""v:"30:Job is DownstairsIO { ds_id: JobId(1003), guest_id: 1, work: ExtentLiveRepair { dependencies: [JobId(1001), JobId(1002)], extent: 1, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2322 ,",time":"2023-09-22T23:14:58.696222159Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":"4759time0":},""
2323 name2023-09-22T23:14:58.696226577Z"":"crucible",","hostnameip-10-150-1-55.us-west-2.compute.internallevel"{""::","30pid":"ip-10-150-1-55.us-west-2.compute.internalmsg""4759:",}"
2324 pid"[1] 6d860aa7-cf0b-47de-b0a7-b9d3a68235ed (758265c4-df86-4cfd-92d1-c4c7741a69e6) Active LiveRepairReady Active ds_transition to LiveRepair":,"4759,time""}{v"
2325 ::"0msg","{:""2023-09-22T23:14:58.696376677Z""name":,msg""hostname:"":"[1] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active WaitQuorum New ds_transition to Active[0] Transition from New to WaitActive"""crucible"ip-10-150-1-55.us-west-2.compute.internal,"",v""pid:"0:,4759",}"
2326 ,v"name{":"":msgcrucible0"",:""level",":name[1] Transition from WaitQuorum to Active30""":"level",:crucible"30","v"level"::300,","time"name:"":"crucible",2023-09-22T23:14:58.696441599Z""level,"":30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2327 {,"time"":msg"",",2023-09-22T23:14:58.69645717Z:"time"time""",::"""2023-09-22T23:14:58.696452852Z"hostname":[0] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) WaitActive New New ds_transition to WaitQuorum"",",2023-09-22T23:14:58.696448653Z"hostname":,ip-10-150-1-55.us-west-2.compute.internal""v,"":""hostname":0pid,":"4759name"}:
2328 "ip-10-150-1-55.us-west-2.compute.internal"{,""cruciblemsg"":,"""level"pid"::475930[2] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active Active New ds_transition to WaitActiveip-10-150-1-55.us-west-2.compute.internal""},
2329 ,""{v":",msg0",time""name:"":"":"crucible2023-09-22T23:14:58.696507891Z"",,""[1] Transition from WaitQuorum to Active"hostnamelevel""::"30pid",":v":04759,"ip-10-150-1-55.us-west-2.compute.internal"},name":"
2330 crucible"",pid""time:"4759:,}"
2331 2023-09-22T23:14:58.696532891Z"",{"levelhostname""{:msg"""::""ip-10-150-1-55.us-west-2.compute.internal30"[0] Transition from WaitActive to WaitQuorum,"",pid""v:"4759:msg"}0
2332 ,,":{"time""namemsg""::""crucible"":"[2] Transition from New to WaitActive,"","levelv""::300[1] Transition from LiveRepairReady to LiveRepair"2023-09-22T23:14:58.696566659Z",,"","v"hostname",name"":":time""ip-10-150-1-55.us-west-2.compute.internal",":crucible"":,2023-09-22T23:14:58.696590366Z""0,level"",hostname:"30:""pid":name"4759:ip-10-150-1-55.us-west-2.compute.internal"},""pid,"":time4759":}
2333 
2334 "{crucible"{",msg":"""2023-09-22T23:14:58.696615146Zmsg"":,""hostname"level"::"[2] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active Active New ds_transition to WaitActive"30,"ip-10-150-1-55.us-west-2.compute.internal[0] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) WaitQuorum New New ds_transition to Active"",",v"pid""v:"4759::}0
2335 ,"0,"name{":""name":"msgcrucible"":,""crucible"level":,",30level":[2] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active Active WaitActive ds_transition to WaitQuorum30""time",:""v":0,2023-09-22T23:14:58.696647482Z",,,""time"hostname":"":nametime""::""""crucible2023-09-22T23:14:58.696669662Z"","ip-10-150-1-55.us-west-2.compute.internal"level,"":,302023-09-22T23:14:58.696671851Z"hostname"",pid":"hostname":":,"4759ip-10-150-1-55.us-west-2.compute.internal""ip-10-150-1-55.us-west-2.compute.internaltime"",:""pid"}:2023-09-22T23:14:58.696697924Z,""4759,"}pid"hostname
2336 ":
2337 {:4759""}
2338 ip-10-150-1-55.us-west-2.compute.internalmsg"":{",""pid"[0] Transition from WaitQuorum to Active:"4759msg"},
2339 ":"v{":0[2] Transition from New to WaitActive"",msg""name:"":","vcrucible"[2] Transition from WaitActive to WaitQuorum,"",":0level""v:"30:,0",name":""crucible"name",":level":,""crucibletime"",":level"":30302023-09-22T23:14:58.696754892Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}time"
2340 ,":"time{":"2023-09-22T23:14:58.696768481Z",""2023-09-22T23:14:58.696768949Zmsg"",:""hostname":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2341 {"msg"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2342 {"msg":"[2] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active Active WaitQuorum ds_transition to Active","v":0,"name"::""crucible","level":30[2] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.696848906Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time":"}
2343 2023-09-22T23:14:58.696857251Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pid":msg"4759:}
23442023-09-22T23:14:58.696ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
2345 {"msg":""[2] Transition from WaitQuorum to Active","v":0,"name":"crucible"[2] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active Active WaitQuorum ds_transition to Active,""level":,30"v":0,"name":"crucible","level":30[1] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active New New ds_transition to WaitActive,""time":","2023-09-22T23:14:58.696918446Zv"":,0","hostname"name:"":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level",:"30,"pid"time:"4759:"}
2346 2023-09-22T23:14:58.696926301Z","hostname":"{,"time":""ip-10-150-1-55.us-west-2.compute.internal"msg2023-09-22T23:14:58.696940506Z"":",,""pid":hostname"test live_repair::repair_test::test_live_repair_enqueue_close ... 82297280-9384-4cb6-a5ef-2a47addce4c7 is now active with session: b2023c24-e7e0-43e8-ac1f-929ca1abdea3:""4759,ip-10-150-1-55.us-west-2.compute.internal"",v""pid:"0:,4759}ok
2347 }"
2348 name":"{crucible{"
2349 ",msg""level:"":"30[1] Transition from New to WaitActive",msg"":"v":0,"[2] Transition from WaitQuorum to Active"name,"":,""time"crucible":,""v":level02023-09-22T23:14:58.696996698Z",":"30name":",crucible"","level":hostname"30:","ip-10-150-1-55.us-west-2.compute.internaltime"":,""pid":47592023-09-22T23:14:58.697013145Z"}
2350 ,","time":{hostname""":"msg":"2023-09-22T23:14:58.697019081Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal","pid[1] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active Active Active ds_transition to Faulted"":,4759"pid"}v
2351 "::0{,4759""name":msg""}crucible:"
2352 ,""level":30{"msg":"[1] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab is now active with session: 10aa236c-40fd-46b5-be15-bb668c9fdf95"crucible",,"",level"":time30"v":":0,"2023-09-22T23:14:58.697055949Z"name",":"hostname":"crucible",,"ip-10-150-1-55.us-west-2.compute.internal"level":"30,time"":pid"":4759}2023-09-22T23:14:58.697069019Z
2353 ","hostname"{:""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] Transition from Active to Faulted"},
2354 "v":0,{,"""name"time:msg"":crucible""":",[1] Transition from WaitActive to WaitQuorum""level,""2023-09-22T23:14:58.697082294Z:v30":"0,","name"hostname":":"crucible","ip-10-150-1-55.us-west-2.compute.internallevel",:"30"time,":""pid":47592023-09-22T23:14:58.697106177Z","}
2355 hostname,"":"time":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.697115777Z"",",{"pid":hostname"4759:"}
2356 ip-10-150-1-55.us-west-2.compute.internal"",{"msg":""pid"msg:":4759"}
2357 [1] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active Active Active ds_transition to Faulted"{,"[1] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active Faulted Active ds_transition to LiveRepairReady""msg",:""v"v"::00,","name[1] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active WaitQuorum New ds_transition to Activename":""":,""cruciblev"",:"0level,"":crucible"name30":,""level":crucible30,"time":"2023-09-22T23:14:58.697163236Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2358 ",","time":"level{":302023-09-22T23:14:58.697167606Z"",msg":""hostname":"[1] Transition from Faulted to LiveRepairReady"ip-10-150-1-55.us-west-2.compute.internal",","v"pid":,0",:time""name:"":"4759crucible2023-09-22T23:14:58.697193362Z"","}level,"":
2359 hostname30":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759}"
2360 ,"msg":{time":"""msg"2023-09-22T23:14:58.697219028Z:"","[1] Transition from Active to Faulted"hostname"[1] Transition from WaitQuorum to Active,"":",v"":vip-10-150-1-55.us-west-2.compute.internal"":,0",0pid","name:"4759:"}
2361 crucible"",{"name":level""msg:"30:""crucible","level":30[1] 82297280-9384-4cb6-a5ef-2a47addce4c7 (b2023c24-e7e0-43e8-ac1f-929ca1abdea3) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,,""timename""::""crucible","2023-09-22T23:14:58.697255148Z"level":,30"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2362 ,"time"{,":""time"msg:"":"2023-09-22T23:14:58.697259369Z"2023-09-22T23:14:58.697268363Z",","hostname":hostname"[2] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active Active New ds_transition to WaitActive"":","v":ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal0",,""namepid""::"4759,crucible}"
2363 ,""level{"pid"":msg30"::"4759}
2364 [1] Transition from LiveRepairReady to LiveRepair","v":,0","time"name:"":{"2023-09-22T23:14:58.697302642Z"crucible",","hostnamelevel""::30""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2365 [1] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active Faulted Active ds_transition to LiveRepairReady",{","timemsg":"":""v":2023-09-22T23:14:58.697318501Z[2] Transition from New to WaitActive"",0,"",vhostname""::0",""name"nameip-10-150-1-55.us-west-2.compute.internal:"",":"crucible""pid,"":crucible"4759level"}:
2366 30,"level{":30"msg":"No repair needed for extent 1","v":0,"name":",crucible""time,"":"level":302023-09-22T23:14:58.69735316Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2367 ,","{time"time"":msg"":":2023-09-22T23:14:58.697364546Z"","hostname"2023-09-22T23:14:58.6973564Z":","[2] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active Active WaitActive ds_transition to WaitQuorumhostname""ip-10-150-1-55.us-west-2.compute.internal",,""v"pid:"0:,4759":",name"":"":"crucible"downstairs,""}level
2368 ":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4759}
2369 ,"time":"{2023-09-22T23:14:58.697399747Z",""hostnamemsg":"":"[1] Transition from Faulted to LiveRepairReady"ip-10-150-1-55.us-west-2.compute.internal",,""v":pid"0:,4759"name}
2370 ":"crucible","{level":30"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30{","msgtime":"":"2023-09-22T23:14:58.697433873Z",",hostname"Crucible stats registered with UUID: 0ee60585-2ce5-46cc-99f9-95940127d893"":","time":"v"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.697442867Z":"0,,"",hostname":pid"":"4759name"ip-10-150-1-55.us-west-2.compute.internal"},:
2371 ""pid"crucible":{,4759"level"}:"
2372 msg"30:"{"msg":"[1] c5b1fe31-ed78-40c7-a076-a3fdfc0cb3ab (10aa236c-40fd-46b5-be15-bb668c9fdf95) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"[2] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active Active WaitQuorum ds_transition to Active"crucible",",v":"0level",":name":"30crucible","level":30,"time":"2023-09-22T23:14:58.697496383Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"time}"
2373 :,""time":"2023-09-22T23:14:58.697514676Z"{,2023-09-22T23:14:58.697517133Z""hostname":",""msg"ip-10-150-1-55.us-west-2.compute.internal"hostname:":"","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid"}Crucible 0ee60585-2ce5-46cc-99f9-95940127d893 has session id: 3bbe3d15-6556-4aef-8fef-434df4baecd5
2374 :"4759,"{v":}0
2375 ,""name"msg":{:""crucible"","msg[1] Transition from LiveRepairReady to LiveRepair"":level,"""v"[2] Transition from WaitQuorum to Active"::,"0v,30"test live_repair::repair_test::test_live_repair_enqueue_repair_noop ... ":ok0,
2376 "name":"name":crucible"","crucible"level":,30",level"":time":"302023-09-22T23:14:58.697589777Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2377 ,","time":time"":"{2023-09-22T23:14:58.697608189Z"2023-09-22T23:14:58.697613189Z","hostname":",""msghostname":"ip-10-150-1-55.us-west-2.compute.internal"":"ip-10-150-1-55.us-west-2.compute.internal",,""pid":pid"[0] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) New New New ds_transition to WaitActive"4759:,"}4759
2378 v"}:
2379 {0,"name":"crucible"",msg":""level":300df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 is now active with session: 443b4f09-6d76-4302-8b47-0237fa76d564","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.69767551Z","hostname":","ip-10-150-1-55.us-west-2.compute.internaltime"",":"pid":47592023-09-22T23:14:58.697684279Z","}hostname":"
2380 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2381 {{"msg":""msg":[0] Transition from New to WaitActive"","v":0,"name":"crucible","level":[1] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active Active Active ds_transition to Faulted"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.697735938Z",","time":hostname":""2023-09-22T23:14:58.697741246Z"ip-10-150-1-55.us-west-2.compute.internal,""hostname":","pid":4759ip-10-150-1-55.us-west-2.compute.internal","}pid"
2382 :4759}
2383 {{"msg":""msg":"[1] Transition from Active to Faulted","v":[0] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) WaitActive New New ds_transition to WaitQuorum"0,,""vname""::"0crucible",","namelevel":"30:"crucible","level":30,"time":"2023-09-22T23:14:58.697801879Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:14:58.697805276Z"4759,"}
2384 hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":"}
2385 [1] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active Faulted Active ds_transition to LiveRepairReady","v":0,{"name":"crucible",""levelmsg":"":30[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.697853007Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2386 {,"time":""msg":"2023-09-22T23:14:58.697860652Z","[1] Transition from Faulted to LiveRepairReady"hostname",:test live_repair::repair_test::test_live_repair_enqueue_reopen ... ""okv"
2387 ip-10-150-1-55.us-west-2.compute.internal":,"0pid",":name":"4759crucible"},"
2388 level":30{"msg":"[0] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.697913502Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2389 ,"time":"2023-09-22T23:14:58.697927982Z"{,"hostname":""msg":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
2390 [1] 0df2f9b9-49ac-4d4a-a922-07b39a2dd1e6 (443b4f09-6d76-4302-8b47-0237fa76d564) Active LiveRepairReady Active ds_transition to LiveRepair",{"{v":0,"name":"""msg"crucible"msg:"",:"[0] Transition from WaitQuorum to Active"level"",:"30v":0Crucible stats registered with UUID: 33e28e69-0d9e-4c9f-b7a6-74c7045e411b","name":",crucible""v",":level":030,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.697976126Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time":}"
2391 2023-09-22T23:14:58.697984533Z",",hostname""{:"time":""msg"2023-09-22T23:14:58.69798927Z":"ip-10-150-1-55.us-west-2.compute.internal",","[1] Transition from LiveRepairReady to LiveRepair"pid",":hostname"4759v":"}:
2392 ip-10-150-1-55.us-west-2.compute.internal"0,,""namepid"{:":4759""crucible"},msg":"
2393 "level":[1] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active New New ds_transition to WaitActive30","v"{:0,"name":"crucible"","msglevel"":":30,"time":"Crucible 33e28e69-0d9e-4c9f-b7a6-74c7045e411b has session id: 17410f8a-bf36-4608-9f9d-0991c783a7cc"2023-09-22T23:14:58.698053348Z,""v":,0","hostname":name"":"crucible","ip-10-150-1-55.us-west-2.compute.internallevel"":,"30pid,":"4759time":}"
2394 2023-09-22T23:14:58.698064813Z","hostname":{"ip-10-150-1-55.us-west-2.compute.internal"","msg"pid",":"time":":Repair for extent 1 s:0 d:[ClientId(2)]"47592023-09-22T23:14:58.698085472Z","},v"":
2395 0{,""name":"msg"crucible":","level":30[1] Transition from New to WaitActive","v":0,"hostnamename""::""crucible","level":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4759}
2396 ,"time":"2023-09-22T23:14:58.698136582Z","{hostname":"ip-10-150-1-55.us-west-2.compute.internal",""pidmsg","":time"":"2023-09-22T23:14:58.698147423Z","hostname":"[0] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) New New New ds_transition to WaitActive"ip-10-150-1-55.us-west-2.compute.internal",,""v"pid"::04759,"name":"}
2397 crucible","level":30:{4759,""":msg"":"downstairs"}
2398 [1] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active WaitActive New ds_transition to WaitQuorum","v":0,","timename":"":"crucible"2023-09-22T23:14:58.698191068Z","level",":hostname":"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2399 {"msg":"[0] Transition from New to WaitActive",,""v"time":":0,"2023-09-22T23:14:58.698217701Z"name":","crucible"hostname":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2400 {"msg":","time":"[1] Transition from WaitActive to WaitQuorum",2023-09-22T23:14:58.698246038Z""v",:"0hostname,"":"name":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level,""{pid"msg":"":4759Crucible stats registered with UUID: 6b3331ad-9fa4-46f8-ac51-e5e343fd372f"}
2401 ,"v":0,"name":"{crucible","level":30"msg":"[0] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.698297864Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time":}"
2402 {"msg":"2023-09-22T23:14:58.698310132Z"Crucible 6b3331ad-9fa4-46f8-ac51-e5e343fd372f has session id: 6f5b1cf0-5769-42d2-849f-2f27f3b8dfa7","v",::0"30,hostname"":name"":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level":,"30pid":4759}
2403 ,{"time":""msg,"":time"":"[0] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.698357384Z",,""v":hostname":0","name":"ip-10-150-1-55.us-west-2.compute.internalcrucible"",","pid":level"4759:30}
2404 {"msg":"[0] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) New New New ds_transition to WaitActive","v":,"0,time"":"name":"crucible"2023-09-22T23:14:58.698386293Z",",level":2023-09-22T23:14:58.698353512Z30","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2405 ,"time":"2023-09-22T23:14:58.698410863Z"{,"hostname":"""ip-10-150-1-55.us-west-2.compute.internal"hostname":"test live_repair::repair_test::test_live_repair_enqueue_repair_repair ... ,ok"
2406 pid":4759msg}
2407 {":""msg":"ip-10-150-1-55.us-west-2.compute.internal[0] Transition from New to WaitActive"",[1] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active WaitQuorum New ds_transition to Active"","v":0,"name"v":":crucible"0,","level"name,"":pid"":crucible"4759,"level":}30
2408 {:"30msg":","time":"[0] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) WaitQuorum New New ds_transition to Active"2023-09-22T23:14:58.698486335Z",",v"":hostname"0:","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",","level"pid:":304759}
2409 {"msg":,""time":"2023-09-22T23:14:58.698496651Z",,"[0] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) WaitActive New New ds_transition to WaitQuorumtime":""2023-09-22T23:14:58.69850831Z,""v",:"0,hostname"":"name":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level,"":pid30":4759}
2410 "{hostname"":"msg":,""time":"ip-10-150-1-55.us-west-2.compute.internal"[0] Transition from WaitQuorum to Active"2023-09-22T23:14:58.698533909Z,"",,""pid"hostname"::"4759ip-10-150-1-55.us-west-2.compute.internal}"
2411 ,"pid":4759}
2412 v":0,"{name":"crucible""msg,"":"level":30[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","{level":30"msg":"[1] Transition from WaitQuorum to Active","time,",:""2023-09-22T23:14:58.69857772Ztime":"","2023-09-22T23:14:58.698583845Z"hostname",:""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid""":,"v"pid4759":}:
2413 04759,"}name"
2414 {:"crucible","level":30,"time":"2023-09-22T23:14:58.698636666Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"":4759msg":"}
2415 [0] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) WaitQuorum New New ds_transition to Active","v":{0,"{name""msg"::""msg":"crucible","level":30[1] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active New New ds_transition to WaitActive","v":0,""name":"crucible","level":30[2] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.69867768Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,"",time"":pid"":47592023-09-22T23:14:58.698686528Z"}
2416 {,"time":"2023-09-22T23:14:58.698691629Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2417 "{msg":""[0] Transition from WaitQuorum to Activemsg":"","v":0[2] Transition from New to WaitActive,""name","v":0,"name:":""crucible"crucible,"","level":30level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}
2418 2023-09-22T23:14:58.698758286Z,""time":","hostname2023-09-22T23:14:58.698760116Z"{",":""msgip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2419 {"msg":"[1] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30":","time":"[1] Transition from New to WaitActive",2023-09-22T23:14:58.698804772Z""v",:"0hostname,"":"name":"crucible",ip-10-150-1-55.us-west-2.compute.internal"",level"":pid":304759}
2420 {"msg":"hostname":[1] Transition from New to WaitActive","time":"2023-09-22T23:14:58.698824862Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2421 {"msg":"[1] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible",,""v"level:":030,"name":"crucible","level":30,"time,"":"time":"{2023-09-22T23:14:58.698864576Z"2023-09-22T23:14:58.698867733Z",","hostname"hostname:"":""msg"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""ip-10-150-1-55.us-west-2.compute.internalpid""":pid"4759:4759},
2422 }"
2423 {{"msg":"[1] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active WaitActive New ds_transition to WaitQuorum","v"pid:"0":msg,"":name4759"}
2424 :"{crucible","level":"30"msg":"[1] Transition from WaitActive to WaitQuorum","[2] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active Active WaitActive ds_transition to WaitQuorum"v",:"0v":,0,,"name""time":":"2023-09-22T23:14:58.698980848Z"crucible",","hostname"level"::"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}Crucible stats registered with UUID: 8022d443-8555-401b-9593-47c602f81c95"
2425 ,"{time":""2023-09-22T23:14:58.699004568Z"msg,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2426 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":""crucible","level":name"30:"crucible","level":30,"time":"2023-09-22T23:14:58.69904335Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2427 ":"{[1] Transition from WaitActive to WaitQuorum","","msg":"vtime""::"0,"name"2023-09-22T23:14:58.699047438Z:""crucible,"","hostname"level"::","ip-10-150-1-55.us-west-2.compute.internalv"30:0,"name":"crucible","[2] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active Active WaitQuorum ds_transition to Activelevel"":30,"v":0,",name":""crucible","level":30time":"2023-09-22T23:14:58.699075967Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":"4759,}"
2428 {,"msg":"[1] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active WaitQuorum New ds_transition to Active","v":"0time",:"",name2023-09-22T23:14:58.699085396Z"""time"::""2023-09-22T23:14:58.699091652Z"pid,"":hostname":"4759}ip-10-150-1-55.us-west-2.compute.internal
2429 ","pid":4759}
2430 {{"msg":""msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30[1] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":crucible"30,"level":30,,""time":"hostname":"2023-09-22T23:14:58.699201665Z","ip-10-150-1-55.us-west-2.compute.internal"hostname":","pid":4759ip-10-150-1-55.us-west-2.compute.internal","}pid
2431 ":4759}
2432 {{""msg"msg:"":"0ee60585-2ce5-46cc-99f9-95940127d893 is now active with session: 9ee1ca43-1cdd-48fb-af14-ac82b4ae106dCrucible 8022d443-8555-401b-9593-47c602f81c95 has session id: bc94380a-dffd-40a5-bb00-352c87a8d400"",","v"v:":00,,"",name"name"":":"crucible"crucible,"",level"":level30":time30":"2023-09-22T23:14:58.69920968Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2433 {"msg",,""time"time:"":":"2023-09-22T23:14:58.699251626Z"2023-09-22T23:14:58.699253172Z",,""[1] Transition from WaitQuorum to Active"hostnamehostname""::"","v":0,"name":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:crucible"4759,}
2434 ,"{time":""msg":2023-09-22T23:14:58.699208443Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal"[1] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active Active Active ds_transition to Faulted",","pidv"":"0level":30,"name":"crucible","level":30,"time":":2023-09-22T23:14:58.699307386Z","4759hostname},"
2435 time}""{:":"2023-09-22T23:14:58.699312888Z""msg":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[0] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) New New New ds_transition to WaitActive"}
2436 ,"v":0{,"name":""msgcrucible"":","level":30[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2437 ,"time":"2023-09-22T23:14:58.699351779Z","hostname":"
2438 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2439 {{"{msg":"""msg"msg"::""[1] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active Faulted Active ds_transition to LiveRepairReady","v":[1] Transition from WaitQuorum to Active"0[2] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active Active New ds_transition to WaitActive,",,""vname":"crucible","level":30,"time":"2023-09-22T23:14:58.699399162Z","hostname":""ip-10-150-1-55.us-west-2.compute.internal:"0,",pid"":name4759":"}
2440 "crucible{v":"0msg":","name":"[1] Transition from Faulted to LiveRepairReadycrucible"",","levelv":"0:,"30name":"crucible",","level":30"level":30,"time":",2023-09-22T23:14:58.699447664Z"",time","time":"2023-09-22T23:14:58.699440152Z","hostname":","time":ip-10-150-1-55.us-west-2.compute.internal"","pid":2023-09-22T23:14:58.699456691Z"4759,"hostname":"}
2441 ip-10-150-1-55.us-west-2.compute.internal","pid":hostname4759":"}
2442 {ip-10-150-1-55.us-west-2.compute.internal","pid"{:4759"}
2443 ":"2023-09-22T23:14:58.699347119Z","hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal:"","pid":4759}
2444 [1] 0ee60585-2ce5-46cc-99f9-95940127d893 (9ee1ca43-1cdd-48fb-af14-ac82b4ae106d) Active LiveRepairReady Active ds_transition to LiveRepair","v":{0,"name":"crucible""msg",:""{level":30[0] Transition from New to WaitActive",""msgv":","time":"2023-09-22T23:14:58.699519214Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2445 :"{[2] Transition from New to WaitActive",""msg"v:"":0,"name":"crucible"[1] Transition from LiveRepairReady to LiveRepair",",level""v":00,"name":","crucible"name",:""level"crucible:"30,"level":30,"time":":30msg":"2023-09-22T23:14:58.699560692Z,"time":"2023-09-22T23:14:58.69957927Z",[2] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active Active New ds_transition to WaitActive""hostname":"ip-10-150-1-55.us-west-2.compute.internal,"","v"pid"::47590}
2446 {"msg":"[2] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible",,""levelname":"crucible","level":30,"time":""2023-09-22T23:14:58.699562266Z":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2447 ,,""time":"{2023-09-22T23:14:58.699636033Z"",msg"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","[0] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) WaitActive New New ds_transition to WaitQuorum"pid",":v":47590,"name":"crucible"},"
2448 level"time"::"302023-09-22T23:14:58.699643188Z","{hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"":"4759msg,"}
2449 ,"time":"2023-09-22T23:14:58.699688461Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}
2450 :"{{"[2] Transition from New to WaitActivemsg""msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30:,""time":"2023-09-22T23:14:58.699732552Z"[2] Transition from WaitActive to WaitQuorum",",hostname""v":":0,"name":"crucible","ip-10-150-1-55.us-west-2.compute.internallevel"":,"30hostnamepid":"4759:"}
2451 ip-10-150-1-55.us-west-2.compute.internal{"msg":","time":"2023-09-22T23:14:58.699759474Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","[0] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) WaitQuorum New New ds_transition to Active"pid":,4759"v"}:
2452 {"msg":"[2] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active Active WaitQuorum ds_transition to Active","v":0,"name":"0crucible",","namepid""::""crucible4759",",}"
2453 level":level30":30",","time"v":":2023-09-22T23:14:58.699807051Z",0,",hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
24542023-09-22T23:14:58.699ZINFOcrucible: [2] Transition from WaitQuorum to Active
2455 {"msg":"6b3331ad-9fa4-46f8-ac51-e5e343fd372f is now active with session: b654e397-ac15-4a47-a3bf-f5ba3d4d7f25","v":0,"name":""crucible","timelevel":30"name":"crucible",""level"::"30,"2023-09-22T23:14:58.699804837Ztime"":","2023-09-22T23:14:58.699868741Z"hostname",:""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid",:"pid4759":4759}}
2456 ,"time":"2023-09-22T23:14:58.699879825Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2457 {{""msgmsg":"[2] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30":","time":"[1] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active Active Active ds_transition to Faulted"2023-09-22T23:14:58.699936874Z",","v":hostname":"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible,"","pidlevel":"4759:30}
2458 {
2459 ,""msg":time"{:"msg":"[0] Transition from WaitQuorum to Active","v":0","name":"2023-09-22T23:14:58.699969424Z"crucible",","hostname"level:""ip-10-150-1-55.us-west-2.compute.internal","pid:":304759}
2460 {"msg":"[1] Transition from Active to Faulted","v":0,"name":","time":"test live_repair::repair_test::test_live_repair_flush_is_flush ... 2023-09-22T23:14:58.700003736Zok
2461 ","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2462 {"msg":"crucible","level":30[1] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30","time":"[2] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.700044982Z",",hostname,"":v":0,"name":"crucible","level":30","time":"ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:14:58.700070545Z":4759,"}hostname
2463 {""time"msg"::""2023-09-22T23:14:58.700051897Z","hostname":"[1] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active Faulted Active ds_transition to LiveRepairReady"ip-10-150-1-55.us-west-2.compute.internal",,""vpid":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.700140452Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2464 ":4759}
2465 {{""msgmsg":"":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2466 {":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30"msg":"[2] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active Active WaitQuorum ds_transition to Active"[1] Transition from Faulted to LiveRepairReady,""v":0,,",v""name"time""::"":crucible"02023-09-22T23:14:58.700298282Z,"",",namelevel"":"":crucible"hostname,30""level"::30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2467 {,"time":"2023-09-22T23:14:58.700340785Z","hostname"":msg"",":ip-10-150-1-55.us-west-2.compute.internaltime""",:""pid":47592023-09-22T23:14:58.700338285Z}"[1] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active WaitActive New ds_transition to WaitQuorum"
2468 ,,""v":{0hostname,"""msg":":name":""crucible","level":ip-10-150-1-55.us-west-2.compute.internal30",[1] 6b3331ad-9fa4-46f8-ac51-e5e343fd372f (b654e397-ac15-4a47-a3bf-f5ba3d4d7f25) Active LiveRepairReady Active ds_transition to LiveRepair"",pid"":v"4759:0,"}name
2469 ":"crucible","level{":30,"time{":""msg":""2023-09-22T23:14:58.700389439Z"msg":","Crucible stats registered with UUID: 7fad8992-b20b-4cb6-990d-67be4c403b6f"hostname"[2] Transition from WaitQuorum to Active,":,v""time:":0"",","2023-09-22T23:14:58.700405911Z""name,"":"hostname"crucible":,""level":ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal30"",v",""pid"pid"::4759:4759}0
2470 ,",time"}"{:"name""2023-09-22T23:14:58.700432715Zmsg"":,
2471 ""hostname":":[1] Transition from LiveRepairReady to LiveRepaircrucible""{,"ip-10-150-1-55.us-west-2.compute.internal""v,","msglevel"""":pid0",:":name4759":}"
2472 crucible""{,":level[1] Transition from WaitActive to WaitQuorum30"":msg30":"","v":0,"name":"Crucible 7fad8992-b20b-4cb6-990d-67be4c403b6f has session id: c32553ed-11d5-4914-9299-facbcae54f86crucible"",","levelv":":0,,""time"name:"":30"2023-09-22T23:14:58.700489131Z"crucible",,""levelhostname""::"30,"timeip-10-150-1-55.us-west-2.compute.internal":"","pid":2023-09-22T23:14:58.700488217Z4759"}
2473 ,,"{"time""msg:"":,"2023-09-22T23:14:58.700507367Z"",Create read repair deps for extent 1time"hostname"::""""hostname,"":v""ip-10-150-1-55.us-west-2.compute.internal":ip-10-150-1-55.us-west-2.compute.internal0",,""2023-09-22T23:14:58.700504721Z","pidname"":,4759:"}"
2474 crucible"hostname"pid":":{,4759""levelmsg""::40"ip-10-150-1-55.us-west-2.compute.internal"},"
2475 pid"[0] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) New New New ds_transition to WaitActive,"",time":"v{4759"::"0},2023-09-22T23:14:58.700560084Z""name"",:"
2476 msg"hostname"":crucible"{:""ip-10-150-1-55.us-west-2.compute.internal",","level""pid:"msg":30475933e28e69-0d9e-4c9f-b7a6-74c7045e411b is now active with session: 1c5e44dd-707c-49ea-88a0-d303601a440f":"},"
2477 v"[1] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active WaitQuorum New ds_transition to Active,""{time":",msg"":"2023-09-22T23:14:58.700593882Z:""v":0IO Read 1004 extent 0 added deps 1,"",,0"hostnamev"":,""":name"ip-10-150-1-55.us-west-2.compute.internal0",,"":name":""crucible"pidname"":crucible",,:4759""}crucible
2478 "level,----------------------------------------------------------------
2479 Crucible gen:0 GIO:true work queues: Upstairs:1 downstairs:1
2480 "levelGW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
2481 { 1 AckReady 1000 Write 1" Newmsg" New":40 New false
2482 STATES DS:0 DS:1 DS:2 TOTAL
2483 New 1 " 1 ",level 1 :: 3
2484 Sent " 0 " 0 time" 0 : 0
2485 Done 0 0 0 " 0
2486 Skipped :2023-09-22T23:14:58.70071889Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2487 0 30 0 0 0
2488 Error 0 0 0 0
2489 Last Flush: 0 0 0
2490 "30[0] Transition from New to WaitActive"Downstairs last five completed:,",
2491 ","time":"Upstairs last five completed: 2023-09-22T23:14:58.700817791Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"
2492 time":"2023-09-22T23:14:58.700801546Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2493 {,"pid":4759}
2494 {""msg":"msg":"[1] Transition from WaitQuorum to Active","v":v0",":name"0:","crucible"name",:""level"crucible":,"30[1] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active Active Active ds_transition to Faulted"level":30,"v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:58.700942619Z2023-09-22T23:14:58.700944516Z"",,""hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:",":47594759time"}}
2495 
2496 {test live_repair::repair_test::test_live_repair_repair_read_push ... "{msg"::""msg":""ok2023-09-22T23:14:58.700951146Z"
2497 [2] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active Active New ds_transition to WaitActive[0] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) WaitActive New New ds_transition to WaitQuorum"",,""vv""::00,,,""name"name:"":""cruciblecrucible""hostname,,""level"level:"30:"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2498 ,,""timetime""::""2023-09-22T23:14:58.701013888Z2023-09-22T23:14:58.701012536Z"",{,""hostnamehostname""::"""msg":ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:"4759}
2499 }[1] Transition from Active to Faulted"
2500 {,"v":"{0msg""msg:"":","name":"[0] Transition from WaitActive to WaitQuorum[2] Transition from New to WaitActive"",,""vv""::00,,""name"name:"":crucible""crucible",crucible""level":,30",level""level"::3030,"time":"2023-09-22T23:14:58.70106546Z","hostname",:""time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.701070317Z,"",pid"":4759hostname":"}
2501 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
2502 ",msg{""":"msgtime":":""2023-09-22T23:14:58.701071377Z",[2] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active Active WaitActive ds_transition to WaitQuorum"",[0] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) WaitQuorum New New ds_transition to Active""v",:"0v,"":hostname":name0",:""name"crucible":,"""levelcrucible"":,30"levelip-10-150-1-55.us-west-2.compute.internal"":,30"pid":4759}
2503 ,,""timetime""::""2023-09-22T23:14:58.701113248Z2023-09-22T23:14:58.701116874Z"",","hostname":hostname""{:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759ip-10-150-1-55.us-west-2.compute.internal","}pid
2504 ":"4759{msg"}"
2505 msg"::{"""msg":"[2] Transition from WaitActive to WaitQuorum","v":[0] Transition from WaitQuorum to Active0",","namev""::"0[1] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active Faulted Active ds_transition to LiveRepairReady"crucible,"",name""level:"":,"crucible30"v,""level"::300,"name":"crucible","level",":time"30:"2023-09-22T23:14:58.701160068Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2506 {"msg":"[2] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible",",level"":time"30:"2023-09-22T23:14:58.701164997Z",","time"hostname":":"2023-09-22T23:14:58.701172752Z"ip-10-150-1-55.us-west-2.compute.internal",,""timepid""::"4759,"}2023-09-22T23:14:58.701197858Z"
2507 ,hostname""{:"hostname"":msg"":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal":,4759"[1] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active New New ds_transition to WaitActive"pid":,"}v4759":}0
2508 ,"
2509 name{":""crucible"msg":,""level":30{[2] Transition from WaitQuorum to Active","v":0,"name"":"msg":"crucible","level":[1] Transition from Faulted to LiveRepairReady30,""time",":"v":02023-09-22T23:14:58.701238324Z,""name":",,""hostname"time:"":"crucible","ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.701247664Z"",,""pid"hostname:"4759:level""}:
2510 ip-10-150-1-55.us-west-2.compute.internal"30,{"pid":4759"msg"}:
2511 "{[1] Transition from New to WaitActive",""msgv""::"0,"name":"crucible","level":308022d443-8555-401b-9593-47c602f81c95 is now active with session: 9a974847-a7c7-4136-88bb-99c862b855df","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.701273742Z",",time":""hostname":",2023-09-22T23:14:58.701284913Z""time",:"ip-10-150-1-55.us-west-2.compute.internal"hostname"","2023-09-22T23:14:58.701290613Z:"",pid"":ip-10-150-1-55.us-west-2.compute.internalhostname4759"":","pid"}ip-10-150-1-55.us-west-2.compute.internal:"4759,"
2512 }{"msg":"[1] 33e28e69-0d9e-4c9f-b7a6-74c7045e411b (1c5e44dd-707c-49ea-88a0-d303601a440f) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30
2513 {"msg":"[1] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active WaitActive New ds_transition to WaitQuorum","v":0,,""nametime":":""crucible2023-09-22T23:14:58.7013447Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2514 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.701395395Z",{"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg",:""pid":4759}
2515 Crucible stats registered with UUID: f40c9366-ba51-49e8-a7ac-2e8d70c8171b","v":0,"name":"crucible","level":30","level":30,","time"time:"":"pid2023-09-22T23:14:58.701438519Z2023-09-22T23:14:58.701434469Z""",,"":hostnamehostname"":4759:""}ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"",
2516 "pid"pid:":47594759}}
2517 
2518 {{"{msg":""msg":""[1] Transition from WaitActive to WaitQuorum",msg"":vCrucible f40c9366-ba51-49e8-a7ac-2e8d70c8171b has session id: 6ab93a12-24ca-44b2-9736-92c62af6e777"":,"0v,""name:"0:,"""namecrucible"":","crucible"level",:"30level":30[1] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:58.701493358Z2023-09-22T23:14:58.701493996Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
2519 
2520 {{"msg":""msg":","time":"[1] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active WaitQuorum New ds_transition to Active"[0] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) New New New ds_transition to WaitActive",",v""v:"0:,0",name""2023-09-22T23:14:58.70150495Zname:"":"crucible"crucible",,""level":level30":"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2521 ,,""timetime""::""{2023-09-22T23:14:58.701541222Z2023-09-22T23:14:58.701539743Z"",,""hostname"hostnamemsg"""::"":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759[1] Transition from Active to Faulted"}}
2522 
2523 {,"v"{":msg"0:""msg":","[1] Transition from WaitQuorum to Activename"[0] Transition from New to WaitActive"",",v""v:"0:,0",name""::""namecrucible""crucible:"",,crucible""level,"""level:"30:level"30:30,",time"":time"":"2023-09-22T23:14:58.701598034Z"2023-09-22T23:14:58.701599736Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:"4759:,}4759
2524 }"
2525 {time":"{"msg":""msgtest live_repair::repair_test::test_live_repair_no_repair_yet ... 2023-09-22T23:14:58.701601002Z"ok,
2526 "[2] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active Active New ds_transition to WaitActive:""","hostname"v":":[0] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) WaitActive New New ds_transition to WaitQuorum0",","name"v:":"0ip-10-150-1-55.us-west-2.compute.internal",crucible""name":,"",levelcrucible"":,30""level"pid"::304759}
2527 ,"time":","time":"2023-09-22T23:14:58.701655828Z"{,2023-09-22T23:14:58.701660036Z"",hostname"":"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
2528 
2529 {[1] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active Faulted Active ds_transition to LiveRepairReady""{msg",":msg"":""[0] Transition from WaitActive to WaitQuorum"v",[2] Transition from New to WaitActive""v,""v:"0:,0",name""name:"":"crucible"crucible":,0",,level""level:"30":name30":"crucible","level":30,"time":","time":"2023-09-22T23:14:58.701704711Z","hostname2023-09-22T23:14:58.701706869Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"}pid
2530 ",:"{4759time":"}"
2531 msg":"{2023-09-22T23:14:58.70171076Z"","msg":"hostname":"[2] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active Active WaitActive ds_transition to WaitQuorum",ip-10-150-1-55.us-west-2.compute.internal"","v[0] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) WaitQuorum New New ds_transition to Active"":,0",v"":name0",:""cruciblename"":,""pid"levelcrucible"":,30":level":475930}
2532 ,","time"time:""{:"2023-09-22T23:14:58.701759009Z","2023-09-22T23:14:58.701761588Z"hostname","":msg":"hostname"":"[1] Transition from Faulted to LiveRepairReady"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,"",pidpid""::47594759"}}
2533 
2534 {v":{"0msg""msg:"":,""name"[2] Transition from WaitActive to WaitQuorum"[0] Transition from WaitQuorum to Active",:",""vv""::00,,""crucible"namename,"""::""level"cruciblecrucible"",,""level"level:"30::3030,,""timetime""::""2023-09-22T23:14:58.701813635Z2023-09-22T23:14:58.701814764Z"",,""hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:"4759:,"}4759
2535 }time":"{
2536 "2023-09-22T23:14:58.701815695Z"msg{":,"""msg"hostname"::""ip-10-150-1-55.us-west-2.compute.internal","pid"[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active New New ds_transition to WaitActive":4759,"v"}[2] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active Active WaitQuorum ds_transition to Active:"0,,""v"name:"0:,""cruciblename"",:""level"crucible:"30,
2537 "level":30{"msg":","time":"2023-09-22T23:14:58.701862406Z"[1] 8022d443-8555-401b-9593-47c602f81c95 (9a974847-a7c7-4136-88bb-99c862b855df) Active LiveRepairReady Active ds_transition to LiveRepair,"",hostname"":time",""v":ip-10-150-1-55.us-west-2.compute.internal0":,"","pid2023-09-22T23:14:58.701867762Z"":,name"4759":}hostname
2538 "":{"crucible","level"":msgip-10-150-1-55.us-west-2.compute.internal"":,""30pid":[1] Transition from New to WaitActive4759",}"
2539 v":0,"{name":""cruciblemsg"":,""level":30[2] Transition from WaitQuorum to Active","v":0,"name":"crucible",,""timelevel"":",:"30time":"2023-09-22T23:14:58.701901577Z","2023-09-22T23:14:58.701909561Z"hostname",:""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal",,""time":pid"":pid"47592023-09-22T23:14:58.701919327Z"}:,
2540 "4759hostname{":"}
2541 ip-10-150-1-55.us-west-2.compute.internal""msg,"":"pid":4759{}
2542 "[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active WaitActive New ds_transition to WaitQuorum"{msg,"""vmsg""::0",:""name":"crucible","[1] Transition from LiveRepairReady to LiveRepair"level7fad8992-b20b-4cb6-990d-67be4c403b6f is now active with session: e14ba783-7271-41eb-9b62-8aeb270fd61b"":,30","v":v":00,,""namename":"":,""cruciblecrucible""time,"":level"",":level":302023-09-22T23:14:58.701963321Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2543 ,"time":"{,"time"2023-09-22T23:14:58.701981956Z""msg:","":"2023-09-22T23:14:58.701983061Zhostname""[1] Transition from WaitActive to WaitQuorum:","",hostname":""ip-10-150-1-55.us-west-2.compute.internal"v",:"0pid,"":name"4759:"}ip-10-150-1-55.us-west-2.compute.internal"
2544 crucible",",{"pid""level:"msg:"30:"4759}
2545 {[1] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active Active Active ds_transition to Faulted",""v":,0",msg":"timename""::"""crucible"2023-09-22T23:14:58.702016694Z",,""level"hostname:"30:"Write to Extent 0:2:9 under repair","v":0,"nameip-10-150-1-55.us-west-2.compute.internal"":","crucible"pid,","":level4759time"}:
2546 "":40{2023-09-22T23:14:58.702035899Z",""hostname"msg:"":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active WaitQuorum New ds_transition to Active
2547 "2023-09-22T23:14:58.702050805Z",{","v""msg:"0:"hostname":"[1] Transition from Active to Faulted,"",name""v:"":0crucible,""ip-10-150-1-55.us-west-2.compute.internalname,"":level"":crucible30"",,""pid"level"::304759}
2548 ,"time":{{""2023-09-22T23:14:58.70208067Z,"",time"":"hostname"msg"2023-09-22T23:14:58.702086024Zmsg:"":"",":"hostname"Write 1:0:9 past extent under repair 0""ip-10-150-1-55.us-west-2.compute.internal:",",""Crucible stats registered with UUID: c8868e06-7e14-4a37-b718-8f66383d6b07v"":ip-10-150-1-55.us-west-2.compute.internalpid"",:"4759pid"}0
2549 :,,""v"name:"0:,""{4759namecrucible"":,""crucible"level",:""}msg
2550 "level40":{:"30"msg":[1] Transition from WaitQuorum to Active"","v":0,"name":"crucible","level":30[1] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active Faulted Active ds_transition to LiveRepairReady","v":0,",name"":"time":"crucible","2023-09-22T23:14:58.702133987Z"level,"":time30":,,""hostnametime"":":""ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.702142782Z2023-09-22T23:14:58.70213642Zpid"":,4759","hostname}"
2551 :","hostname"{ip-10-150-1-55.us-west-2.compute.internal":",msg""pid:"":"4759Write 1:1:9 past extent under repair 0",}"
2552 v":ip-10-150-1-55.us-west-2.compute.internal"0{,",name""msg""pidtime""::"4759"::""2023-09-22T23:14:58.702154321Z}"crucible
2553 ,"Crucible c8868e06-7e14-4a37-b718-8f66383d6b07 has session id: e17a1d32-1fd2-4f6a-9143-a7392fe78df3,""{level,""""v:"40:hostnamemsg""::""0,"name"ip-10-150-1-55.us-west-2.compute.internal":","crucible"pid":[2] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active Active New ds_transition to WaitActive4759",,""time"level:"":,}"
2554 v"302023-09-22T23:14:58.702204826Z"{:,0",msg"""name:":""hostname"crucible":",[1] Transition from Faulted to LiveRepairReady""level",:"30v",ip-10-150-1-55.us-west-2.compute.internal""time,"":pid""::2023-09-22T23:14:58.702224011Z4759"0},
2555 ,",name"":{"time""hostname""msg:"":"crucible:"","ip-10-150-1-55.us-west-2.compute.internalIO Write 1004 on eur 0 Added deps 1"2023-09-22T23:14:58.702235715Zlevel"",:"30hostname","":pid",:"4759v":"0}
2556 ,",ip-10-150-1-55.us-west-2.compute.internal""{name":"",time"":pid"":cruciblemsg2023-09-22T23:14:58.702260212Z4759""":,"",}level"
2557 "[0] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) New New New ds_transition to WaitActive:"40,{hostname""":msg""v":ip-10-150-1-55.us-west-2.compute.internal:"","0pid[2] Transition from New to WaitActive"":,4759",,""}v"
2558 :timename"":0{,":crucible""",msg""name:"":"2023-09-22T23:14:58.70228638Z""crucible",,"level"hostname:"30:""[1] 7fad8992-b20b-4cb6-990d-67be4c403b6f (e14ba783-7271-41eb-9b62-8aeb270fd61b) Active LiveRepairReady Active ds_transition to LiveRepairlevel"":,30"ip-10-150-1-55.us-west-2.compute.internal"v",:"0pid",:,4759""}time
2559 ":,name""time:"""crucible:"","2023-09-22T23:14:58.70231671Z"2023-09-22T23:14:58.702319093Zlevel",","":hostname":"hostname30":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid":,4759"pid"}:,4759"
2560 time}"
2561 :{"{"2023-09-22T23:14:58.702340767Z"msg",:"""hostname"msg":":"[0] Transition from New to WaitActive",ip-10-150-1-55.us-west-2.compute.internal"[2] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active Active WaitActive ds_transition to WaitQuorum,"""pid,"":v4759":}0
2562 ,"v"name"{:":"crucible"0,msg"",level:"":"30name":"[1] Transition from LiveRepairReady to LiveRepair"crucible",","vlevel""::030,"name":","crucible"time",:""level":302023-09-22T23:14:58.70238446Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2563 ,"time"{:",""2023-09-22T23:14:58.702396157Zmsg""time",:"":"hostname":"[2] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.702392508Z",ip-10-150-1-55.us-west-2.compute.internal""v,"":,"0pid,hostname"""name:"4759:"}:"crucible
2564 "ip-10-150-1-55.us-west-2.compute.internal",","level":pid30":4759}
2565 {,"time":""msg":2023-09-22T23:14:58.702431939Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":[0] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) WaitActive New New ds_transition to WaitQuorum"4759,"}v":
2566 0,"name":"crucible"{,"level":30"msg":"[2] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.702469682Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2567 ,"time":"2023-09-22T23:14:58.702482781Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":[0] Transition from WaitActive to WaitQuorum"4759,"v":}0
2568 test live_repair::repair_test::test_live_repair_repair_write_push ... {,ok"
2569 msg"":"name":"crucible"[2] Transition from WaitQuorum to Active",","level":v":300,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.702534142Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2570 ,"time":"{"2023-09-22T23:14:58.702538227Zmsg":"","hostname":"[0] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) WaitQuorum New New ds_transition to Active","vip-10-150-1-55.us-west-2.compute.internal"":,0","pidname":""crucible":,4759"level}":
2571 30{"msg":"f40c9366-ba51-49e8-a7ac-2e8d70c8171b is now active with session: b7f69b5e-7fe1-4604-9c09-0d5b9217b341",,""timev":"0:","name":"2023-09-22T23:14:58.702575395Z"crucible",","hostname"level"::"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2572 ,"time":"{2023-09-22T23:14:58.702597986Z","hostname":""ip-10-150-1-55.us-west-2.compute.internalmsg":"","pid":4759[0] Transition from WaitQuorum to Active"}
2573 ,"v":0,"{name":"crucible"",msg":""level":30[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.70262871Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.702637271Z"pid",":hostname":"4759ip-10-150-1-55.us-west-2.compute.internal"},"
2574 pid":4759}
2575 {{""msgmsg":"":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30[1] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.70267502Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2576 {,"time":""msg":"2023-09-22T23:14:58.702682796Z","hostname":"test live_repair::repair_test::test_live_repair_send_io_write_below ... [1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active Faulted Active ds_transition to LiveRepairReadyok",
2577 "ip-10-150-1-55.us-west-2.compute.internal"v",":pid"0:,"4759name":"crucible}"
2578 ,"level":30{"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.702727196Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2579 ,"time":"{2023-09-22T23:14:58.702740239Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","[1] Transition from Faulted to LiveRepairReadypid"":,4759"v":}0
2580 ,"name":"crucible","{level":30"msg":"[1] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active WaitActive New ds_transition to WaitQuorum","v":0,",time":""name"2023-09-22T23:14:58.702776106Z":,""hostname":"crucible","ip-10-150-1-55.us-west-2.compute.internallevel"":,30"pid":4759}
2581 {"msg":","time":"2023-09-22T23:14:58.702797661Z"[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active LiveRepairReady Active ds_transition to LiveRepair,"","hostname":v"":0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",,""pid"level"::304759}
2582 {"msg":"[1] Transition from WaitActive to WaitQuorum",",v"":time":0","name":"2023-09-22T23:14:58.702828119Z"crucible",,""hostname"level":":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2583 {,"time"":msg"":"2023-09-22T23:14:58.702854089Z","[1] Transition from LiveRepairReady to LiveRepair"hostname":","v":0,"name"ip-10-150-1-55.us-west-2.compute.internal":,""pidcrucible"",":level":475930}
2584 ,"time":"2023-09-22T23:14:58.702881383Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2585 "msg"{:""msg":"[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active LiveRepair Active ds_transition to Faulted",[1] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active WaitQuorum New ds_transition to Active""v":,0","v"name":":crucible","0level":,"30name":"crucible","level":30,"time":"2023-09-22T23:14:58.702917482Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2586 {"msg":"[1] Transition from LiveRepair to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.702921335Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal",2023-09-22T23:14:58.702936752Z"","pid"hostname":":4759ip-10-150-1-55.us-west-2.compute.internal","pid":}4759
2587 }
2588 {{"msg":""msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active Faulted Active ds_transition to LiveRepairReady"crucible,"",v"":level"0:,"30name":"crucible","level":30{"msg":"Crucible stats registered with UUID: 430e0668-1443-47e0-a582-1e8e56ebed58",",,v""time:"0:",""name2023-09-22T23:14:58.702978681Z"":",crucible""time",hostname""level:""::"30ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:14:58.702975869Z":4759,"}
2589 hostname":"{ip-10-150-1-55.us-west-2.compute.internal"","msgpid":"":4759,"[1] Transition from Faulted to LiveRepairReady"time"},:""v":
2590 2023-09-22T23:14:58.70300481Z0",",name"":"hostname":crucible""{,"level"ip-10-150-1-55.us-west-2.compute.internal:"30,"pid""msg":":4759}
2591 ,"[2] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active Active New ds_transition to WaitActivetime""{:","v":"2023-09-22T23:14:58.703033836Zmsg"",:""0hostname",:""Crucible 430e0668-1443-47e0-a582-1e8e56ebed58 has session id: 3ccf3c6d-266e-431a-8c1f-43b8c19cd637"name",ip-10-150-1-55.us-west-2.compute.internal""v,""pid:"0:,4759":name}"
2592 :""{crucible"crucible""msg,"":level"":,"30level":30[1] f40c9366-ba51-49e8-a7ac-2e8d70c8171b (b7f69b5e-7fe1-4604-9c09-0d5b9217b341) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703072723Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal",",pid""2023-09-22T23:14:58.703074176Z"time:"4759:","}2023-09-22T23:14:58.70308182Z
2593 "hostname,"{":"hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal"",pid""pid[0] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) New New New ds_transition to WaitActive""::,4759"4759}v
2594 "}:{0
2595 ","msg"name:"":"crucible{",[1] Transition from LiveRepairReady to LiveRepair""level,"":v""30:msg":"0,"name":"[2] Transition from New to WaitActive"crucible",,""v":level,"":time30"0:,""name":"crucible"2023-09-22T23:14:58.703127235Z",",","hostnametime""::""level":302023-09-22T23:14:58.703137458Zip-10-150-1-55.us-west-2.compute.internal"",,""pid":hostname"4759:"}
2596 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
2597 "msg":"{"[0] Transition from New to WaitActivemsg"":","v":0,"Write to Extent 0:2:9 under repairname"",:""v"crucible:,"0,"timename"":":"crucible","level"2023-09-22T23:14:58.703150139Z":40,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2598 ,""time,"":"level":30{2023-09-22T23:14:58.703176559Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2599 ,"[2] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active Active WaitActive ds_transition to WaitQuorumtime{"":"",msg2023-09-22T23:14:58.703190102Z"":",""v"hostnameWrite 1:0:9 past extent under repair 0"":",:"0vip-10-150-1-55.us-west-2.compute.internal"":,0",",namepid""::"4759"crucible}"
2600 ,"name":"{level":crucible"40","msg"level":":30[0] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) WaitActive New New ds_transition to WaitQuorum,"",time"":v"":0,"2023-09-22T23:14:58.703230046Zname"":","crucible"hostname",:""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2601 ,",time""{:"time":""2023-09-22T23:14:58.703245288Z"msg":,""2023-09-22T23:14:58.703234702Z"hostname"Write 1:1:9 past extent under repair 0:"",,"ip-10-150-1-55.us-west-2.compute.internal""v,"":pid0"hostname":,4759":"}name
2602 ":"ip-10-150-1-55.us-west-2.compute.internal"crucible{",,""msg"level:"":"40pid":[0] Transition from WaitActive to WaitQuorum"4759,"v":0},"
2603 name,"":"time":crucible"","level"2023-09-22T23:14:58.703282454Z:"30{,"hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":,""pid":4759,"}
2604 time":[2] Transition from WaitActive to WaitQuorum"{","v""2023-09-22T23:14:58.703296893Zmsg"",:"":hostname"0:IO Write 1004 on eur 0 Added deps 1"",,"ip-10-150-1-55.us-west-2.compute.internal""v":,0",pid""name:"4759:"}name"crucible
2605 ":,{"""levelmsg""::40"crucible","level":30[0] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible,"","time":level"":302023-09-22T23:14:58.703335225Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2606 ,",time"":time":""2023-09-22T23:14:58.703346689Z"2023-09-22T23:14:58.703338449Z","hostname",:""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"":,4759"pid"}:
2607 4759}{
2608 "msg":"[0] Transition from WaitQuorum to Active"{,"v":0,"name":""msg":"crucible","level":30[2] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703394876Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2609 ,"time":"{2023-09-22T23:14:58.703404221Z"{",msg""":"msg":[1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active New New ds_transition to WaitActive","v":"0,"name":"crucible","Crucible stats registered with UUID: 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9"level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703437291Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:14:58.703441887Z"4759,"}
2610 hostname":"{hostnameip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
2611 {"msg":""msg":Crucible 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 has session id: e97f0585-b400-40dd-918c-c5075104b8e6"","v":0,"name":"crucible","level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4759}
2612 :{""msg":"[1] Transition from New to WaitActive","time"[2] Transition from WaitQuorum to Active:"",,2023-09-22T23:14:58.703489126Z""v,"hostname":"ip-10-150-1-55.us-west-2.compute.internal"",:"0pid",:"4759name":"}
2613 {"msg":"crucible","level":30"[0] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) New New New ds_transition to WaitActive","vv""::0,0"name":,","crucible"time":,""level":302023-09-22T23:14:58.703526492Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",""pid"name"::","crucible"time,":"2023-09-22T23:14:58.703541225Z"","level"hostname:"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2614 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703558902Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}"
2615 time{"msg":"[1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703609186Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}"
2616 :"{2023-09-22T23:14:58.70357726Z"","msg":"hostname":"[1] Transition from WaitActive to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal,"",v":"test live_repair::repair_test::test_live_repair_span_write_write ... pid"ok:4759
2617 0,"name":"crucible","level":30}
2618 {,"time":"47592023-09-22T23:14:58.703670629Z""msg":,}""
2619 [0] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) WaitActive New New ds_transition to WaitQuorum{"msg":"c8868e06-7e14-4a37-b718-8f66383d6b07 is now active with session: 8778efe1-ed15-438a-a5de-27252f5bb5a3","v":0,"name":"crucible","level":30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703724015Z","hostname":",ip-10-150-1-55.us-west-2.compute.internal"",time":"2023-09-22T23:14:58.703734906Z","hostname":""pid":4759ip-10-150-1-55.us-west-2.compute.internal","pid":hostname4759}"
2620 }{"msg":"[1] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30
2621 {,:""time"":ip-10-150-1-55.us-west-2.compute.internal""msg,""pid"::4759}
2622 {"msg":"[1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30"[0] Transition from WaitActive to WaitQuorum","v":0,",name"":time":2023-09-22T23:14:58.703797189Z""crucible"2023-09-22T23:14:58.703844345Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}
2623 level":30{"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30",,""time":,""hostname"time":"2023-09-22T23:14:58.703874427Zip-10-150-1-55.us-west-2.compute.internal","pid":4759","}hostname"
2624 :"ip-10-150-1-55.us-west-2.compute.internal{","pid":4759"}msg"
2625 {"msg":":"[0] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) WaitQuorum New New ds_transition to Active2023-09-22T23:14:58.703883695Z"",",v""hostname::"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":300,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.703997638Z"",,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","time"pid:":2023-09-22T23:14:58.704005004Z","hostname":""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pidpid":":47594759}
2626 }
2627 {{"msg":"{"msg[2] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active Active New ds_transition to WaitActive"":","v":0,"name":Crucible stats registered with UUID: a5309061-73a6-4f0b-bc10-fb7ff5b5432a""crucible",",v"":level"0:,"30name":"crucible","level":30"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,","time"time:"":"2023-09-22T23:14:58.704078113Z"2023-09-22T23:14:58.70408087Z",","hostname"hostname:":"ip-10-150-1-55.us-west-2.compute.internal","pid":"4759}
2628 ip-10-150-1-55.us-west-2.compute.internal",,"{""pid"":msg4759":}"
2629 time:Crucible a5309061-73a6-4f0b-bc10-fb7ff5b5432a has session id: 83187140-217c-405d-a29e-e92b0fb42bdc{"4759"",msg""v:"":}:[2] Transition from New to WaitActive
2630 "","{v":0,"name":"crucible","level":30"msg":"[1] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active Faulted Active ds_transition to LiveRepairReady","time",:""v":2023-09-22T23:14:58.704144548Z"0,",hostname":""name":"ip-10-150-1-55.us-west-2.compute.internal","cruciblepid":"4759,"}level"2023-09-22T23:14:58.704093084Z
2631 "{,""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal"[2] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active Active WaitActive ds_transition to WaitQuorum",":,"pidv""::047590},
2632 30{",msg""name":":"crucible","level":30[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.704211504Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2633 {","msg"time:"":"[2] Transition from WaitActive to WaitQuorum2023-09-22T23:14:58.704221472Z"",""vname"",:""crucible"hostname",,"":time"level"":30ip-10-150-1-55.us-west-2.compute.internal","pid"::47590,"}name
2634 :","2023-09-22T23:14:58.704231398Z"time":"{,2023-09-22T23:14:58.704263518Z""",:hostname""":"cruciblehostname"":"ip-10-150-1-55.us-west-2.compute.internalmsg":"","pid":[1] Transition from New to WaitActive"4759,"v":},0","name"level":":crucible"30,"level":30
2635 ","time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.704313034Z",","pid":hostname":4759"{}
2636 "ip-10-150-1-55.us-west-2.compute.internal",,"{pid":"4759"msg}"
2637 :"time":"{2023-09-22T23:14:58.704318962Z""msg":","[0] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) New New New ds_transition to WaitActive"hostname":","v"msgip-10-150-1-55.us-west-2.compute.internal"[2] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active Active WaitQuorum ds_transition to Active"":,""[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30pid":4759,"}time"
2638 ,{:0","msg":name"":"crucible","level":30[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30"v":,"0time":,""name":"2023-09-22T23:14:58.704419396Z"crucible",,""level"hostname:":30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2639 ,"time":":{,""2023-09-22T23:14:58.704427779Ztime""msg:"":"2023-09-22T23:14:58.704371708Z""[0] Transition from New to WaitActive,","hostname"hostname":":",ip-10-150-1-55.us-west-2.compute.internal","pid":4759"v":}0
2640 "{"ip-10-150-1-55.us-west-2.compute.internal"msg":","pid":[1] Transition from WaitActive to WaitQuorum"4759,,""}name"
2641 :"vcrucible","level":30{"msg":","time":"[1] c8868e06-7e14-4a37-b718-8f66383d6b07 (8778efe1-ed15-438a-a5de-27252f5bb5a3) Active LiveRepairReady Active ds_transition to LiveRepair"2023-09-22T23:14:58.704537842Z",",hostname"":"v":ip-10-150-1-55.us-west-2.compute.internal0",,""pidname":":"4759crucible"}"
2642 :{0,"name":""2023-09-22T23:14:58.704439767Zcruciblemsg"":"",","hostnamelevel":""[0] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) WaitActive New New ds_transition to WaitQuorum","v":0:,"30name"ip-10-150-1-55.us-west-2.compute.internal:"","crucible"pid",:"4759level":30}
2643 {"msg":",,""time[2] Transition from WaitQuorum to Active"":,level,""""2023-09-22T23:14:58.70459067Z":,"30timehostname":""ip-10-150-1-55.us-west-2.compute.internal":,""pid":47592023-09-22T23:14:58.704587496Z"}v
2644 ",:"0{hostname":,"""nameip-10-150-1-55.us-west-2.compute.internal","pid"":,"time":"2023-09-22T23:14:58.70461357Z","hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":[0] Transition from WaitActive to WaitQuorum"4759,"v":}0
2645 ,"name":"crucible","level":30{4759}
2646 :"crucible",","level"time"::"302023-09-22T23:14:58.704657697Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2647 ,"time":"{2023-09-22T23:14:58.704671648Z","hostname"":msg"":{"ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}msg"
2648 [0] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) WaitQuorum New New ds_transition to Active":",{"v":0,""msg"name:"":"[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active WaitQuorum New ds_transition to Active"crucible",",v":430e0668-1443-47e0-a582-1e8e56ebed58 is now active with session: 4b1819be-ee7b-4367-994d-70218dcb0427","v":0,"name":"crucible","level":30,"time":""2023-09-22T23:14:58.704709676Z"0msg",":""[1] Transition from LiveRepairReady to LiveRepair"level":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.704731894Zname"",:""hostname"crucible:"","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2649 ,"time":"{2023-09-22T23:14:58.704738535Z"","hostname":","time":"2023-09-22T23:14:58.704753666Zip-10-150-1-55.us-west-2.compute.internal"",",pid""hostname"::"4759ip-10-150-1-55.us-west-2.compute.internal"},"
2650 pid":4759}
2651 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level"msg":":30[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.704796977Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2652 {",,"msg"time"hostname:"":":"2023-09-22T23:14:58.704803918Z"","ip-10-150-1-55.us-west-2.compute.internal"hostname":,""pid":4759[2] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active Active New ds_transition to WaitActiveip-10-150-1-55.us-west-2.compute.internal}"
2653 ,""pid":,"4759{v"}:
2654 "0msg",":{"name":"crucible"",msg"":level":[1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active Active Active ds_transition to Faulted"30,"v":0,"name":"crucible","level":30","time":"2023-09-22T23:14:58.704851109Z","hostname":",[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active New New ds_transition to WaitActive""time":"ip-10-150-1-55.us-west-2.compute.internal,2023-09-22T23:14:58.704854183Z""",,"pid":4759}
2655 v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.70488915Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2656 {"msg"{:"[2] Transition from New to WaitActive",""v"msg:":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":300,"name":"crucible","level",:"30time,"time":"2023-09-22T23:14:58.70494225Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2657 {"msg":"[2] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active Active WaitActive ds_transition to WaitQuorum","v":"0,"name":""crucible"hostname",:"level"::30"2023-09-22T23:14:58.704928826Z","hostname":""ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"":,"time4759":,""pid":4759}
2658 test live_repair::repair_test::test_live_repair_update ... 2023-09-22T23:14:58.70498661Z{","}hostname":"
2659 "ip-10-150-1-55.us-west-2.compute.internal"msg"ok:"
2660 [1] Transition from Active to Faulted","{pid",":v":47590","}name"
2661 {:"crucible","level"":msg":"30[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30msg":","time":"2023-09-22T23:14:58.705093074Z",",hostname"":"time":"ip-10-150-1-55.us-west-2.compute.internal[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active WaitActive New ds_transition to WaitQuorum2023-09-22T23:14:58.705088542Z""",","pid":,4759"v":0,"name":"crucible","level":30}
2662 {,"time":""msg":"2023-09-22T23:14:58.70512194Z"hostname,"":"hostname":"[2] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active Active WaitQuorum ds_transition to Active"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",",pid":"4759pid",}"
2663 v":0,"name":"crucible","{level":30"msg":"[1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active Faulted Active ds_transition to LiveRepairReady",",time":""2023-09-22T23:14:58.705172155Z"v",":hostname":"0,"ip-10-150-1-55.us-west-2.compute.internal"name":,"pid":4759}
26642023-09-22T23:14:58.705ZINFOcrucible: [2] Transition from WaitQuorum to Active
2665 {"msg":""crucible"5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 is now active with session: a14811af-cf7f-465a-acfb-10f5efafbe32":,,""vlevel""::04759,"30name":"}crucible"
2666 ,"level":30{"msg",":"[1] Transition from WaitActive to WaitQuorum"time":","v":2023-09-22T23:14:58.705255734Z"0,,""name":hostname"":crucible"","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2667 {"msg":","time"[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active Active Active ds_transition to Faulted":","v":,2023-09-22T23:14:58.705247973Z0","name":"crucible","level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2668 ,"time":""2023-09-22T23:14:58.705306393Z","time{hostname"":":""msgip-10-150-1-55.us-west-2.compute.internal"":","2023-09-22T23:14:58.705281157Z"pid":[1] Transition from Faulted to LiveRepairReady"4759,,}"
2669 {v":0,"name":"crucible","level":30,""msgtime""::""2023-09-22T23:14:58.705351624Z"[1] Transition from Active to Faulted",","vhostname""::"0,"name":"ip-10-150-1-55.us-west-2.compute.internalcrucible"",,""pid"level:":475930}
2670 "{hostname"":msg",:""time"":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.705375362Z",,""pid":hostname":[1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active LiveRepairReady Active ds_transition to LiveRepair""4759,"vip-10-150-1-55.us-west-2.compute.internal"",}
2671 "pid":4759}
2672 :0,"name":{"crucible",""msg"level:"":30[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible",,""time"level:"":302023-09-22T23:14:58.705416667Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",pid"":time"4759:"}
2673 2023-09-22T23:14:58.705431042Z{"{"msg":""msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":Crucible stats registered with UUID: 65642e3b-b3b8-4cfd-95ff-31b56650ceaf""crucible",",v"":level":030,"name":"crucible","level":30,"hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.705469656Z"pid",:"4759hostname":"}
2674 {,""{time"msg"ip-10-150-1-55.us-west-2.compute.internal"":"msg",:[1] Transition from Faulted to LiveRepairReady":"2023-09-22T23:14:58.705474637Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active WaitQuorum New ds_transition to Active"4759}
2675 ,,""v"v"::0{0,","namename"":""msgcrucible"::"""pid"crucible":Crucible 65642e3b-b3b8-4cfd-95ff-31b56650ceaf has session id: e2d26d29-154e-43c6-a019-1f3dd806ff40"4759,",level}""v":0,":name30":"crucible","level"
2676 :30"{,"time":",""2023-09-22T23:14:58.705559601Z"msg",:""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2677 [0] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Active LiveRepair Active ds_transition to Faulted"{,,""msglevel"":":time":"302023-09-22T23:14:58.705571822Z","hostname":"[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active LiveRepairReady Active ds_transition to LiveRepair","v":ip-10-150-1-55.us-west-2.compute.internal0","name":"crucible","level",":pid30":4759}
2678 ,"time":","2023-09-22T23:14:58.705596208Z"time{"":"msg":,"2023-09-22T23:14:58.705608467Z""v,"[0] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) New New New ds_transition to WaitActive":"0,hostname,":""name":"crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2679 {,""v""":msg0",:""name":[1] Transition from LiveRepairReady to LiveRepair"",crucible""v,":"0level,time""name":"crucible","level":30":30,"time":"2023-09-22T23:14:58.705679478Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2680 {:""2023-09-22T23:14:58.705641117Zmsg"":",","Write to Extent 0:0:9 under repair"time",":v""2023-09-22T23:14:58.705688902Z:"0,",name":"hostnamecrucible"hostname:""",":level"":40ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid":pid":47594759}
2681 },
2682 {"msg":"[0] Transition from Active to Faulted{","v":0,"name":""crucible"msg",:""level":30[1] Transition from WaitQuorum to Active","v":0,"name":"crucible",""timelevel""::"302023-09-22T23:14:58.705729029Z",",time"":"hostname":"2023-09-22T23:14:58.705762234Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pid":4759}ip-10-150-1-55.us-west-2.compute.internal"
2683 {,"time":""2023-09-22T23:14:58.705774278Z","hostname"hostname"::""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"pid":}
2684 4759"msg":"}
2685 {Write to Extent 0:0:9 under repair",""msg"v{","pid""::msg"47590:,}"
2686 name"":"crucible",[0] Transition from New to WaitActive"{level:""msg":"":40[2] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active Active New ds_transition to WaitActive"[2] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Faulted LiveRepair Active ds_transition to Faulted",",v"":v"0:,0,"name":",""timecrucible"name"":,":""level":2023-09-22T23:14:58.705850012Z"crucible"30,,""hostname":"level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2687 "{","msg":v",""[1] client skip 3 in process jobs because fault","v":0,"timename""::""crucible",","leveltime":"2023-09-22T23:14:58.705876514Z""::,"030hostname":,""name":"ip-10-150-1-55.us-west-2.compute.internalcrucible","level":30","pid",:"4759time":"}2023-09-22T23:14:58.705875857Z2023-09-22T23:14:58.705903968Z
2688 "",",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal"{ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47594759,},""
2689 "time"":{""2023-09-22T23:14:58.705912846Z"msg:"":","downstairs"hostname"[2] Transition from Active to Faulted}"
2690 msg----------------------------------------------------------------
2691 Crucible gen:0 GIO:true work queues: Upstairs:6 downstairs:6
2692 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
2693 1 AckReady 1000 Write 1 Done Done Done false
2694 2 NotAcked 1001 Read 1 Done Done Done false
2695 { 3 NotAcked 1002 WriteU 1 Done Done Done false
2696 ":,"" 4 AckReady 1003 Write 1[2] Transition from New to WaitActive New Skip New false
2697 " 5 NotAcked 1004 Read 1 New Skip New false
2698 :,""v":0, 6 NotAcked 1005 WriteU 1 Newv Skip New false
2699 STATES DS:0 DS:1 DS:2 TOTAL
2700 " New 6 msg" 3 :"" 6 name" 15
2701 Sent 0 0 0 0
2702 Done 0 0 0 0
2703 Skipped 0 3 0 3
2704 " Error :0 0 , 0 0 [1] job 1000 middle: AckReady 0
2705 Last Flush: "","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.706075878Z","hostname":"0 0 0
2706 Downstairs last five completed:ip-10-150-1-55.us-west-2.compute.internal","pid":4759name"}:"
2707 crucible","level":30{"msg":":
2708 [0] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible,"","time"level"::Upstairs last five completed: 30
2709 ","time":"2023-09-22T23:14:58.70614485Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2710 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30"crucible","level":2023-09-22T23:14:58.706120864Z"30,"hostname":,""ip-10-150-1-55.us-west-2.compute.internal","pid":4759time":"}
2711 2023-09-22T23:14:58.706193745Z{","hostname":",msg""":"ip-10-150-1-55.us-west-2.compute.internal"time,Write to Extent 0:0:9 under repair""pid,"""::4759v"ip-10-150-1-55.us-west-2.compute.internal:"}0,,""
2712 "pidname""::"4759crucible",{2023-09-22T23:14:58.706205336Z"","":"level",:downstairs40"""}msg
2713 "hostname":":{""ip-10-150-1-55.us-west-2.compute.internal"msg":[0] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) WaitQuorum New New ds_transition to Active",",",[1] changed 3 jobs to fault skipped""",time""v:"":v":2023-09-22T23:14:58.706256455Z0"0pid,,,""name"hostname""::""crucible"name,ip-10-150-1-55.us-west-2.compute.internal""level,"":pid30":":""crucible"4759,":level"4759}:
2714 }30
2715 {,"time":""msg":"2023-09-22T23:14:58.706296125Z","Write to Extent 0:0:9 under repair"hostname{",:""v":,"0ip-10-150-1-55.us-west-2.compute.internal,time":""msgname"",:"""pid""crucible:"4759,",2023-09-22T23:14:58.706308232Z":"level"""::40","downstairshostname":""[2] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active Active WaitActive ds_transition to WaitQuorum}ip-10-150-1-55.us-west-2.compute.internal"","
2716 ,",pid":"4759{v"time""msg:""::}2023-09-22T23:14:58.706337967Z""0
2717 ,","hostnamename"{:""[1] 5ef2dbd8-14fa-49c7-9429-be5eaaf14af9 (a14811af-cf7f-465a-acfb-10f5efafbe32) Active LiveRepair Active ds_transition to Faulted:"",""msg":ip-10-150-1-55.us-west-2.compute.internalv"",:"0pid,"":crucible""4759name"[0] Transition from WaitQuorum to Active",}:
2718 ""crucible{,"""msglevelv":0,"",:"":"level[1] client skip 3 in process jobs because fault"":3030,name":""crucible"v",":level":030,"name":","crucibletime"":,""level":302023-09-22T23:14:58.706399349Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.70640666Z",",pid"":,4759"hostname":"time}"
2719 :ip-10-150-1-55.us-west-2.compute.internal"{",2023-09-22T23:14:58.706410805Z""msg,"":,"hostname"":[1] Transition from LiveRepair to Faulted"""timepid"ip-10-150-1-55.us-west-2.compute.internal,"",v"":pid0",:"4759name"":,4759":"":crucible"":,downstairs""level}"
2720 :}"{30
2721 "2023-09-22T23:14:58.706399565Z"msg"{,:"""msg",[1] job 1000 middle: AckReady""time",:""v"::02023-09-22T23:14:58.706463033Z,"",hostname"name"""hostname:"":"crucible":",ip-10-150-1-55.us-west-2.compute.internal""level,"":pid30[1] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active New New ds_transition to WaitActive"ip-10-150-1-55.us-west-2.compute.internal",":"v"4759:0,,}"
2722 time,""name"{"::""msg""pid"crucible"2023-09-22T23:14:58.706491382Z:"",,":level":Extent 0 Aborting repair""4759,hostname""v:"":30}0ip-10-150-1-55.us-west-2.compute.internal,"","namepid""::"4759crucible",,""":"level"downstairs:"40}
2723 
2724 ,","{time":"{"timemsg""::""2023-09-22T23:14:58.706530551Z"2023-09-22T23:14:58.706543062Z"",,"[1] notify = true for 1001"hostname,"""v:"":hostname"0ip-10-150-1-55.us-west-2.compute.internal,":,""namepid""::"4759"crucible}"
2725 ip-10-150-1-55.us-west-2.compute.internal",,"msg"pid"level":"::304759"}
2726 [2] Transition from WaitActive to WaitQuorum"{,""v"msg":":0[1] Transition from New to WaitActive",,",""vname"time"":0:,":""name":"crucible2023-09-22T23:14:58.706591383Z"crucible",,"""level"hostname",":30:"level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759,",":""time":"downstairs"2023-09-22T23:14:58.706622298Z"},"
2727 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
2728 ",msg{""":"time"msg":"[1] notify = true for 1002":","v":02023-09-22T23:14:58.706625361Z","name[1] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active WaitActive New ds_transition to WaitQuorum",",":""hostname"crucible":,v":""0level","ip-10-150-1-55.us-west-2.compute.internalname""::30,""crucible"pid,"":level"4759:30}
2729 ,"time":"2023-09-22T23:14:58.706675778Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":",""downstairstime":"msg"":2023-09-22T23:14:58.706682916Z"}"
2730 ,"hostname":"{[2] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active Active WaitQuorum ds_transition to Active"",msg"":"v":[1] changed 3 jobs to fault skipped"ip-10-150-1-55.us-west-2.compute.internal,0"",v,""pid":0","name"name":":crucible":"4759,"crucible"}level"
2731 :,"30level":30{"msg":","time":"[1] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.706742472Z",",hostname"":"v":0ip-10-150-1-55.us-west-2.compute.internal",,""pid":name":"4759crucible",",",":"time""downstairs:"level""}
2732 2023-09-22T23:14:58.706744429Z":30,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal"",msg":""pid":4759}
2733 [1] 430e0668-1443-47e0-a582-1e8e56ebed58 (4b1819be-ee7b-4367-994d-70218dcb0427) Faulted LiveRepair Faulted ds_transition to Faulted","v":,0","time":name"{":"crucible"2023-09-22T23:14:58.706777199Z",""levelmsg":",":"30[2] Transition from WaitQuorum to Active"hostname":","v":0ip-10-150-1-55.us-west-2.compute.internal",",name":""pid",:crucible""4759,time":""}level2023-09-22T23:14:58.706809618Z"
2734 ,"":30hostname"{:""ip-10-150-1-55.us-west-2.compute.internalmsg"":","pid":4759}
2735 [1] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active WaitQuorum New ds_transition to Active","v":0test live_repair::repair_test::test_repair_abort_basic ... {"okmsg
2736 ":",,"[1] Transition from LiveRepair to Faulted"time"":,name":""crucible"v",":"0level",2023-09-22T23:14:58.706833737Z"":name":30","crucible"hostname":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2737 ,"time":"2023-09-22T23:14:58.706880196Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}"
2738 "msg"time":":{"2023-09-22T23:14:58.706885928Z"","msg":"hostname":a5309061-73a6-4f0b-bc10-fb7ff5b5432a is now active with session: 16f3c8a3-5547-4c93-b43b-aa71fb92fbd0""[1] Transition from WaitQuorum to Active",",ip-10-150-1-55.us-west-2.compute.internal"v",v":"0:","pid":0name"4759,":"name"crucible}:
2739 ""crucible,""{level":,"30msg":""Extent 0 Aborting repair"level",":v":300,"name":"crucible","level":40,"time":"2023-09-22T23:14:58.706943587Z","hostname":","time":"2023-09-22T23:14:58.70695209Z"ip-10-150-1-55.us-west-2.compute.internal",",hostname"":",pid"ip-10-150-1-55.us-west-2.compute.internal":","4759pid":time4759"}:}
2740 
2741 {""msg":"2023-09-22T23:14:58.706949964Z"{Abort repair on extent 0: All downstairs are Faulted",","v":0hostname",""name":"msg"crucible":":","level":ip-10-150-1-55.us-west-2.compute.internal"50,"pid":4759[2] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active Active New ds_transition to WaitActive"},"
2742 v{"msg":"[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30":0,"name":"crucible","level",:"30time":"2023-09-22T23:14:58.707039436Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}
2743 time":"2023-09-22T23:14:58.707003904Z","hostname{,":""time":""msg"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.707059504Z",":","pid":hostname4759[1] Transition from Active to Faulted"","}:"v":
2744 0ip-10-150-1-55.us-west-2.compute.internal",",name"":"pid"crucible:"4759,"level":}30
2745 {"msg":"[2] Transition from New to WaitActive","v":0,"name":,""crucible"time",:""level":2023-09-22T23:14:58.707106641Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2746 ,{"time":""2023-09-22T23:14:58.707130206Z"msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active Faulted Active ds_transition to LiveRepairReady"}
2747 ,"v":0,{"name":""crucible"msg",":level":"30[2] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.707168857Z","hostname":","ip-10-150-1-55.us-west-2.compute.internaltime"",":pid"":47592023-09-22T23:14:58.707180231Z"},
2748 "hostname":"ip-10-150-1-55.us-west-2.compute.internal","{pid":4759}
2749 "msg":"{[1] Transition from Faulted to LiveRepairReady","v":"0msg,""name"::""crucible","[2] Transition from WaitActive to WaitQuorum"level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.70722831Z",","time"hostname:":""2023-09-22T23:14:58.707233459Zip-10-150-1-55.us-west-2.compute.internal"",","pid"hostname:"4759:"}
2750 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}{
2751 "msg":"{"msg":"[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"[2] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active Active WaitQuorum ds_transition to Active"crucible",",v":"0level",":name":30"{crucible","level":30"msg":"Crucible stats registered with UUID: 9cf1cfee-1bb8-4140-8e2f-38535cb4be54","v":0,"name":"crucible",",level":",time"test live_repair::repair_test::test_repair_abort_all_failed_reserved_jobs ... 30timeok
2752 ":"":"2023-09-22T23:14:58.707293995Z"2023-09-22T23:14:58.707297331Z",","hostname":hostname"",":"time":ip-10-150-1-55.us-west-2.compute.internal""ip-10-150-1-55.us-west-2.compute.internal",,""2023-09-22T23:14:58.707316526Z"pid"pid":,4759":hostname"4759:"}}
2753 ip-10-150-1-55.us-west-2.compute.internal"
2754 {,"pid":4759{"}msg
2755 "":msg"":"{[1] Transition from LiveRepairReady to LiveRepair"[2] Transition from WaitQuorum to Active"",msg,":"""v":v"0:Crucible 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 has session id: b5809739-3766-4449-8296-1dc8c62282b90,"",",name"":"v":crucible0name":","","name":"levelcrucible"",":crucible"level30",":level":3030,"time":"2023-09-22T23:14:58.70741212Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal",,2023-09-22T23:14:58.707415484Z"""pid"time",":hostname4759:""}:2023-09-22T23:14:58.707415882Z""
2756 ,ip-10-150-1-55.us-west-2.compute.internal"","hostname{"pid"::4759""msg"}:ip-10-150-1-55.us-west-2.compute.internal"
2757 ,""pid":4759}{
2758 "65642e3b-b3b8-4cfd-95ff-31b56650ceaf is now active with session: eda3e3b2-1531-45ea-b904-d701ad430ba2"msg":","v":0{,"name":"crucible",[0] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) New New New ds_transition to WaitActive"""levelmsg"","::"v":30Write to Extent 0:0:9 under repair"0,,""v"name"::"0crucible",",name":""level"crucible":,"30level":40,"time":"2023-09-22T23:14:58.707491616Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2759 ,"time":"2023-09-22T23:14:58.707505987Z","{,time":"""hostname2023-09-22T23:14:58.707507135Z"msg"","::""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal":,"4759pid":[1] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active Active Active ds_transition to Faulted4759"}
2760 ,}"
2761 {v":0,"name":""crucible"msg{",":"level":"30msg":"[0] Transition from New to WaitActive","Write to Extent 0:0:9 under repair"v":,"0v,":"0name",:""name":crucible""crucible",","levellevel"":,:3040"time":"2023-09-22T23:14:58.707570908Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2762 ,"time":","{2023-09-22T23:14:58.707591418Z"time":,"""hostname"msg":":"2023-09-22T23:14:58.707592067Z"ip-10-150-1-55.us-west-2.compute.internal",[1] Transition from Active to Faulted",",pid":"4759"v":hostname0},"
2763 :""name":"crucible"{ip-10-150-1-55.us-west-2.compute.internal",,"""level"msg":"pid"::475930}
2764 [0] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) WaitActive New New ds_transition to WaitQuorum","v":0,"name":{"crucible","level":"30msg":","[1] client skip 3 in process jobs because fault"time":,""v":02023-09-22T23:14:58.707648889Z","name",":"hostname":crucible"","level":30ip-10-150-1-55.us-west-2.compute.internal","pid,"":time"4759:"}
2765 2023-09-22T23:14:58.707663103Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pid"msg"::4759","}time
2766 ":"[1] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active Faulted Active ds_transition to LiveRepairReady",2023-09-22T23:14:58.707677823Z""{,v":"0"hostname":,""name":"msg"ip-10-150-1-55.us-west-2.compute.internalcrucible:"","","pid[0] Transition from WaitActive to WaitQuorum"level"",::30"4759v":0,",":""downstairs"name":}"
2767 crucible","level":30,"time{":"2023-09-22T23:14:58.707729835Z"","msg"hostname":":"[1] job 1000 middle: AckReady"ip-10-150-1-55.us-west-2.compute.internal",,"","vtime""pid"::0:,""4759name":"2023-09-22T23:14:58.707745732Z}crucible"
2768 ",",level"":hostname"{:30""msg"ip-10-150-1-55.us-west-2.compute.internal":","pid":4759[1] Transition from Faulted to LiveRepairReady"}
2769 ,"v":0,"{name":"crucible"","msg"level":":,30"time":"2023-09-22T23:14:58.707782816Z"[0] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) WaitQuorum New New ds_transition to Active",,""v":hostname0",":"name":"crucible","ip-10-150-1-55.us-west-2.compute.internallevel"":,"30pid":,4759"time":,""":"downstairs"2023-09-22T23:14:58.707803352Z"}
2770 ,"hostname":","ip-10-150-1-55.us-west-2.compute.internaltime"{,""pid":"":msg"2023-09-22T23:14:58.707818875Z"4759:,""hostname"}:[1] changed 3 jobs to fault skipped"
2771 ","v":0ip-10-150-1-55.us-west-2.compute.internal,"{",name"":pid":"4759msg":""crucible"},
2772 [1] 65642e3b-b3b8-4cfd-95ff-31b56650ceaf (eda3e3b2-1531-45ea-b904-d701ad430ba2) Active LiveRepairReady Active ds_transition to LiveRepair""level",":{v"30:0","msgname":"":"crucible","level":[0] Transition from WaitQuorum to Active30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.707877398Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time",:""pid":47592023-09-22T23:14:58.70788547Z",,","time"hostname":":""":2023-09-22T23:14:58.707892113Z""ip-10-150-1-55.us-west-2.compute.internal","downstairshostname",:"""}pid"ip-10-150-1-55.us-west-2.compute.internal":
2773 4759,"pid":}4759
2774 {}
2775 {"msg":""{msg":""[1] Transition from LiveRepairReady to LiveRepair"[1] a5309061-73a6-4f0b-bc10-fb7ff5b5432a (16f3c8a3-5547-4c93-b43b-aa71fb92fbd0) Active LiveRepair Active ds_transition to Faulted"msg",:"","v"v"::00,,"[1] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active New New ds_transition to WaitActive""name":,"name"vcrucible"":""crucible",",:"0level"level":,30":name"30:"crucible","level":30,"time":"2023-09-22T23:14:58.707980641Z",","time"hostname",":"time":":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.707985939Z""2023-09-22T23:14:58.707982707Z,","pid"":,4759hostname":"}"
2776 hostname"ip-10-150-1-55.us-west-2.compute.internal":","{pid":ip-10-150-1-55.us-west-2.compute.internal"4759,""{msg":"pid"}[1] 0 final dependency list []""msg,"v":0,"name":"crucible","level"":":Crucible stats registered with UUID: 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf"30,"v":0,"name":"crucible","level":30:4759}
2777 ,,"
2778 time{"msg":"[1] Transition from LiveRepair to Faulted","v":0,"name":"crucible","level":30":"2023-09-22T23:14:58.708053678Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2779 {{"msg":"Crucible 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf has session id: 39db2329-7823-4e21-9a0a-6f2899f6b59a","v":0,"name":"crucible","level":30"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible",,","time"level:"":""time30"time":"2023-09-22T23:14:58.708047756Z":","2023-09-22T23:14:58.708166404Z"hostname":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"":,"pid4759":47592023-09-22T23:14:58.708100201Z}"
2780 ,{"msg":"[0] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30"time":"2023-09-22T23:14:58.708191306Z",,""hostname":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal":,"4759pid":,"4759time":"}
2781 2023-09-22T23:14:58.708245409Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759}
2782 }
2783 {{""msg":"msg"Extent 0 Aborting repair":","v":0,"name":"crucible","level":40[1] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":""2023-09-22T23:14:58.708302662Zmsg"":","hostname":"[0] Transition from New to WaitActive","v":ip-10-150-1-55.us-west-2.compute.internal"0,,""pid",name","time":"":"2023-09-22T23:14:58.70831255Z"downstairs","hostname"}:
2784 "ip-10-150-1-55.us-west-2.compute.internal","{pid":4759"}msg
2785 ":"":"crucible","[1] 0 final dependency list []"level":{,30"v":0,"name":""msg"crucible":","level":30[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level"::30,4759"time":"}
2786 ,2023-09-22T23:14:58.708359747Z""time",:""hostname"{2023-09-22T23:14:58.708366846Z:"",""hostnameip-10-150-1-55.us-west-2.compute.internal":"","pid":4759}
2787 {"msg":"[0] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) WaitActive New New ds_transition to WaitQuorum","v":0,",name"":"time"crucible:"","level":2023-09-22T23:14:58.708374394Z"30ip-10-150-1-55.us-west-2.compute.internal",,""hostname":"pid":4759,ip-10-150-1-55.us-west-2.compute.internal"",":""downstairs","}time":"2023-09-22T23:14:58.708417927Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2788 msg":"{Extent 0 Create and send noop jobs",""v":msg0":"[0] Transition from WaitActive to WaitQuorum","v":0,"
2789 name":"crucible,{"","namelevel"":":crucible","level":40pid":4759}
2790 ,"time":"2023-09-22T23:14:58.708465961Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","{pid":4759}
2791 30"msg":"{"msg":"[1] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active WaitQuorum New ds_transition to Active"Abort repair on extent 0 starting with id 1003 deps:[JobId(1002), JobId(1001), JobId(1000)]",",v":"0v",:"0,,"name":"crucible","level":30"msg":"[1] 0 final dependency list []","v":0,"name":"crucible","level":30"time":","2023-09-22T23:14:58.70849167Z"time":","hostname":2023-09-22T23:14:58.708516368Z"",ip-10-150-1-55.us-west-2.compute.internal"name":"crucible","time"level:"":402023-09-22T23:14:58.708528483Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":"downstairs"},"
2792 time":"2023-09-22T23:14:58.708547976Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"",,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2793 "{pid":4759}
2794 {"msg":":4759}
2795 [0] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level"":30msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.708628077Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2796 ,{"time":""msg"2023-09-22T23:14:58.708638978Z":","hostname"[0] Transition from WaitQuorum to Active":","v":0,"ip-10-150-1-55.us-west-2.compute.internal"name",":"pid":crucible4759","level"}:
2797 30{"msg":"[2] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active Active New ds_transition to WaitActive","v":,0","time"name":":"crucible","2023-09-22T23:14:58.708688174Zlevel":"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2798 {"msg":","time":"2023-09-22T23:14:58.708712969Z","hostname"[1] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active New New ds_transition to WaitActive:"","v":ip-10-150-1-55.us-west-2.compute.internal"0,","pid"name"::"4759crucible","}level
2799 ":30{"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.708752662Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2800 ,"time":"{2023-09-22T23:14:58.708766001Z",""hostnamemsg":"":"[1] Transition from New to WaitActive","ip-10-150-1-55.us-west-2.compute.internalv":0,"name":"crucible","level":30","pid":4759}
2801 ,"time":"2023-09-22T23:14:58.70880214Z","hostname{":"ip-10-150-1-55.us-west-2.compute.internal",""msg"pid"::test live_repair::repair_test::test_repair_dep_cleanup_done ... 4759ok
2802 }
2803 test live_repair::repair_test::test_repair_abort_reserved_jobs ... ok
2804 {"msg":"[1] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible"","level":30[2] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.708888933Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2805 {"msg":"[1] Transition from WaitActive to WaitQuorum,""time",:""v":0,2023-09-22T23:14:58.70890282Z""name":,""crucible"hostname","level":30,"time":"2023-09-22T23:14:58.7089672Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2806 {"msg":"[1] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2807 ,"time":"2023-09-22T23:14:58.709042813Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":"}
2808 {"msg":"[1] Transition from WaitQuorum to Active","v":0[2] Transition from WaitActive to WaitQuorum,""name":"crucible,"","v"level":30,"time":"2023-09-22T23:14:58.709124206Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
28092023-09-22T23:14:58.709ZINFOcrucible:: [2] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active Active New ds_transition to WaitActive 0level = 30
2810 {",msg"":"time":"[2] Transition from New to WaitActive"2023-09-22T23:14:58.709177679Z","v":,0","hostname"name:"":"crucible",ip-10-150-1-55.us-west-2.compute.internal"","level"pid":30:4759},"
2811 time":"2023-09-22T23:14:58.709221182Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}
2812 {"msg":"msg":"[2] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name[2] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active Active WaitQuorum ds_transition to Active"":"crucible",,""v"level:":030,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.709281541Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}{
2813 "msg":"Crucible stats registered with UUID: 8ee74e9c-4380-4bf2-913b-9825b6e2923a{",""vmsg":"{[2] Transition from WaitActive to WaitQuorum","v":0","msg":name"":"crucible"Crucible stats registered with UUID: 4dc045ec-5a1e-410e-8896-79fc1c563313","level",:30"v":0,""name":"time"crucible:"","level":2023-09-22T23:14:58.709286218Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""pid":time":"47592023-09-22T23:14:58.709345001Z"}
2814 ,,""hostname"time"::""2023-09-22T23:14:58.709354693Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"""pid:"0,"name":"crucible",:"4759level":}30
2815 ,"pid":4759{}
2816 ,""msg":"time":"{2023-09-22T23:14:58.709394578Z","hostname":""[2] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active Active WaitQuorum ds_transition to Activemsg""ip-10-150-1-55.us-west-2.compute.internal",,"v":0,"name":"crucible",""pid"level:":4759{30}
2817 "msg":"{[2] Transition from WaitQuorum to Active","v":0",msg"":name"":","crucible"time":,""Crucible 8ee74e9c-4380-4bf2-913b-9825b6e2923a has session id: 043467c1-af50-49df-8a85-89945b943bab"level2023-09-22T23:14:58.709438696Z,"","hostname":"vip-10-150-1-55.us-west-2.compute.internal":","pid":4759Crucible 4dc045ec-5a1e-410e-8896-79fc1c563313 has session id: edb2d2fc-df76-4605-ab5a-337da0dc2f65"}
2818 {":0,"name":"crucible"""msg,""::level"30":30[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":","time":"2023-09-22T23:14:58.709498714Z","2023-09-22T23:14:58.709499848Z","timehostname""::""2023-09-22T23:14:58.709506438Z"ip-10-150-1-55.us-west-2.compute.internal",",,pid"":hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"4759:4759},}"
2819 hostname":"ip-10-150-1-55.us-west-2.compute.internal{","pid":"4759msg":""}v
2820 "3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf is now active with session: 00bc7fd9-cb37-40a9-bc42-8026d7e44a56:"0,","v"name"::"crucible"0,,""level"name"
2821 :"crucible","{level":30{"msg":""msg":"9cf1cfee-1bb8-4140-8e2f-38535cb4be54 is now active with session: 1c47f668-99d2-4cb8-b990-8ee01032ed06","v":0,"name":"crucible"[0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) New New New ds_transition to WaitActive","level",":v"30,:"0time,":"2023-09-22T23:14:58.709590871Z","hostname":":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2822 {"msg":"","name"time"::""[1] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active Active Active ds_transition to Faulted"2023-09-22T23:14:58.709623599Z",","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}"
2823 time":"2023-09-22T23:14:58.709609868Z","hostname{":"ip-10-150-1-55.us-west-2.compute.internal"","msgpid":"":4759}
2824 [0] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) New New New ds_transition to WaitActive","v":0,"name":"{crucible","level":30"msg":"v":0[1] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active Active Active ds_transition to Faulted",","name"v":","time":"crucible","2023-09-22T23:14:58.709689039Z"level":,"30cruciblehostname":"","level"ip-10-150-1-55.us-west-2.compute.internal":,"30pid"::47590}
2825 ,"name":"crucible"{,"level":,"30time":""msg":"2023-09-22T23:14:58.709711481Z"[0] Transition from New to WaitActive",","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.709718829Z"pid":,4759,}
2826 {"hostname":"ip-10-150-1-55.us-west-2.compute.internal"","time"pid:"":2023-09-22T23:14:58.709730037Z"4759,"}hostname"
2827 :"ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
2828 ""msgmsg""::"{"[0] Transition from New to WaitActive","v"":msg"0:","[1] Transition from Active to Faulted"[1] Transition from Active to Faulted,"vv""::"00,,,""v":name0"":name":"crucible","level":"30crucible","level":name30":"crucible",,""name"level":":crucible"30,"level":30,"time":","time"2023-09-22T23:14:58.709825458Z","time":","hostname":2023-09-22T23:14:58.709839856Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4759,""time":"}
2829 :"2023-09-22T23:14:58.709829312Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2830 {"msg":"[0] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{"msg":","time":,"[1] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active Faulted Active ds_transition to LiveRepairReady2023-09-22T23:14:58.709893684Z","hostname":""ip-10-150-1-55.us-west-2.compute.internal",","pid"v:":47590","pidname"":}:"
2831 crucible{","level":302023-09-22T23:14:58.709838544Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2832 ,"time":"2023-09-22T23:14:58.709941421Z"",msg"":hostname"":"4759[0] Transition from WaitActive to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal",,"}v
2833 ":"0,"name"{:"crucible"pid","":msglevel4759"}
2834 {":30:"[1] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.710003207Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",:"4759time":"}
2835 2023-09-22T23:14:58.710015662Z"",msg":{""msg":"[0] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30{"msg"[1] Transition from Faulted to LiveRepairReady:"","time,"[0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759v"}
2836 :0,"name":"{crucible","level":"30msg":","time""[1] Transition from Faulted to LiveRepairReady":",:""2023-09-22T23:14:58.710049127Z"v":02023-09-22T23:14:58.710072115Z,,""name"hostname:"":"crucible"",","levelip-10-150-1-55.us-west-2.compute.internalhostname,"",:"""pid":4759}time"
2837 :"{:30"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.710120838Z"","hostname":",2023-09-22T23:14:58.710091083Z"ip-10-150-1-55.us-west-2.compute.internaltime"",:""pid":"2023-09-22T23:14:58.710129478Z4759",}",hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2838 ip-10-150-1-55.us-west-2.compute.internal{""msg":,""pid":4759[1] 3f90871a-d6d4-40b8-9b9f-b0ce0b47ecdf (00bc7fd9-cb37-40a9-bc42-8026d7e44a56) Active LiveRepairReady Active ds_transition to LiveRepair"},
2839 "v":0,"name":"crucible","level":30"{hostname":"ip-10-150-1-55.us-west-2.compute.internal",""msg"pid"::"4759}
2840 ,"{"msg":"[1] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30time":"
2841 2023-09-22T23:14:58.710174595Z,""time",":{"hostname":"2023-09-22T23:14:58.710204778Z""msg",:""ip-10-150-1-55.us-west-2.compute.internal"hostname",":pid":4759}
2842 {""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid[1] Transition from LiveRepairReady to LiveRepair"":,"4759v":0,"}name"
2843 :"crucible","level":30{"msg":"[0] Transition from WaitActive to WaitQuorum[1] Transition from New to WaitActive"","v":0,"name",:"",v"crucible"",time":"2023-09-22T23:14:58.710281077Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2844 {[1] 9cf1cfee-1bb8-4140-8e2f-38535cb4be54 (1c47f668-99d2-4cb8-b990-8ee01032ed06) Active LiveRepairReady Active ds_transition to LiveRepair""msg":"Write to Extent 0:1:9 under repair",,""v":v"0:,0","name":"namecrucible"""level":30,"level":40::0"crucible",",,"","nametime":"crucible","level":30level":30":"2023-09-22T23:14:58.710326524Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:14:58.710349871Z",","time"hostname:":""pid":4759ip-10-150-1-55.us-west-2.compute.internal}
2845 time{"msg":"[1] 1004 final dependency list [JobId(1001), JobId(1002), JobId(1003)]","v":0,"name":"crucible",""level":30:"2023-09-22T23:14:58.710321781Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759"time":"},
2846 "2023-09-22T23:14:58.710408944Zpid":4759}
2847 {","hostname":"{"msg""ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.710346056Z,msg":""[1] Transition from LiveRepairReady to LiveRepair",","v":hostname"0:","name":"crucibleip-10-150-1-55.us-west-2.compute.internal"",",pid"":level":475930}"
2848 pid":4759,"":"downstairs"}{
2849 "msg":":[0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) WaitQuorum New New ds_transition to Active"","v":0,"name":"crucible","[1] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active WaitActive New ds_transition to WaitQuorumlevel":"30,"v":{0,"name":""crucible"msg,""level:"":30[1] 1005 final dependency list [JobId(1004), JobId(1001)]","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:14:58.710454326Z"",time",,""hostnametime":"2023-09-22T23:14:58.710502192Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":"downstairs"}
2850 ":"":"2023-09-22T23:14:58.710488939Z"ip-10-150-1-55.us-west-2.compute.internal",","hostname":"pid":4759ip-10-150-1-55.us-west-2.compute.internal"}
2851 ,{"msg":"Write to Extent 0:1:9 under repair","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:14:58.710570816Z","hostname":""pid":ip-10-150-1-55.us-west-2.compute.internal",4759"pid":4759}}
2852 
2853 {{time"":"msg":""2023-09-22T23:14:58.710497958ZWrite to Extent 0:1:9 under repair""msg,,""v"":hostname0",:""name":":crucibleip-10-150-1-55.us-west-2.compute.internal"","","levelpid""::40[0] Transition from WaitQuorum to Active"4759,"}v"
2854 :0,"name"{,":"time""crucible":msg"":,"2023-09-22T23:14:58.71062354Z"",level""[1] Transition from WaitActive to WaitQuorum":hostname,"":"v":300ip-10-150-1-55.us-west-2.compute.internal",","name"pid:"":4759crucible"},
2855 "level":30{"msg":"Write to Extent 0:2:9 under repair","v":0,"name":"crucible","level":40,"time":",2023-09-22T23:14:58.710652745Z"",,""time"timehostname""::"":"2023-09-22T23:14:58.710661037Z"ip-10-150-1-55.us-west-2.compute.internal,"","hostname"pid:"":2023-09-22T23:14:58.710647197Z"4759ip-10-150-1-55.us-west-2.compute.internal","}pid
2856 ":,"{4759hostname":}"
2857 "ip-10-150-1-55.us-west-2.compute.internal"{msg,"":msg"":""pid":4759Write 1:0:9 past extent under repair 0","}v[1] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active WaitQuorum New ds_transition to Active"":,0",v""name:"0:,""
2858 namecrucible"":","crucible"level",:"40level":30{"msg":",,""time"time:"":"[1] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active New New ds_transition to WaitActive"2023-09-22T23:14:58.710715336Z"2023-09-22T23:14:58.710714507Z,"","hostname"hostname:"":","v":ip-10-150-1-55.us-west-2.compute.internal"0ip-10-150-1-55.us-west-2.compute.internal,,"","pidpid""::47594759"}}
2859 
2860 {name":""{crucible""msg"msg:"":","level"IO Write 1013 on eur 0 Added deps 1[1] Transition from WaitQuorum to Active"",:,30""vv""::00,,""namename"test live_repair::repair_test::test_repair_dep_cleanup_sk_repair ... :"":crucible""okcrucible,"",level"":level30"
2861 ,:"40time":"2023-09-22T23:14:58.710760187Z",,""hostnametime"":":,2023-09-22T23:14:58.710777504Z""",time"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.710782976Z",,ip-10-150-1-55.us-west-2.compute.internal""",hostname""pid:"":pid"4759ip-10-150-1-55.us-west-2.compute.internal"},
2862 ":pid{"4759:"4759msg":}"
2863 }
2864 [2] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active Active New ds_transition to WaitActive{",""v"msg:"0:","name":"[1] 1007 final dependency list []"crucible"{,,""vlevel""::030,"name":""crucible"msg",:""level":30[1] Transition from New to WaitActive",",v"":time":"0,"name2023-09-22T23:14:58.710827682Z,"","time"hostname:"":""2023-09-22T23:14:58.710834781Z":",ip-10-150-1-55.us-west-2.compute.internalcrucible"""hostname,"":pid"",":ip-10-150-1-55.us-west-2.compute.internal4759"level,}"
2865 pid":"{4759:",msg"""::""30downstairs"[2] Transition from New to WaitActive"}
2866 ,"v":0,{"name":""msgcrucible"":","level":30[1] 1008 final dependency list [JobId(1004)]","v":0,"name":"crucible","level":30,","time"time"::"","2023-09-22T23:14:58.710868803Z2023-09-22T23:14:58.710876736Ztime""":",,"2023-09-22T23:14:58.710882463Z"hostname,"":""hostname":ip-10-150-1-55.us-west-2.compute.internal""hostname":",ip-10-150-1-55.us-west-2.compute.internal""pid,"":pidip-10-150-1-55.us-west-2.compute.internal""4759:,}4759
2867 ,""{":"pid""downstairsmsg""}:
2868 ":4759{"}msg
2869 "[2] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active Active WaitActive ds_transition to WaitQuorum:"","v":0,"name":"[1] 1013 final dependency list [JobId(1008), JobId(1004), JobId(1012)]"{,crucible""v",:"0,level"":name30":""msg"crucible":","level":30[1] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active WaitActive New ds_transition to WaitQuorum",",time":""v",2023-09-22T23:14:58.71093409Z""time",":"hostname:"2023-09-22T23:14:58.710939484Z0":",,""name"ip-10-150-1-55.us-west-2.compute.internalhostname"",:""pid"::4759ip-10-150-1-55.us-west-2.compute.internal"},"
2870 pid"":{4759crucible",","msg"level""::""":downstairs"30[2] Transition from WaitActive to WaitQuorum}"
2871 ,"v":0,"name":"crucible","level":30,",time":""time":2023-09-22T23:14:58.710988368Z"","hostname"2023-09-22T23:14:58.710982068Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid"}:
2872 4759}
2873 {"msg":"{"msg":"[2] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active Active WaitQuorum ds_transition to Active","v":[1] Transition from WaitActive to WaitQuorum"0,,""namev"":":crucible"0,","level"name":":crucible30","level":30,"time":"2023-09-22T23:14:58.711046491Z","hostname":",ip-10-150-1-55.us-west-2.compute.internal"","time":"pid":47592023-09-22T23:14:58.711049274Z"}
2874 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"msg":"}
2875 [2] Transition from WaitQuorum to Active","v":0,"name":"{crucible","level":30"msg":"[1] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.711095385Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2876 ,"time{":"2023-09-22T23:14:58.711108184Z""msg",":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":47594dc045ec-5a1e-410e-8896-79fc1c563313 is now active with session: 67ef3947-ce9c-400e-a753-67511f30592a",}"
2877 v":0,"name":"crucible"{,"level":30"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.711147725Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2878 ,"time":"{2023-09-22T23:14:58.711157633Z",""hostname":msg":""{ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active Active Active ds_transition to Faulted}"
2879 msg"":","v{":0,"Crucible stats registered with UUID: 0a679673-3132-4061-840f-f267f0742c60"name":","cruciblev":""0msg",,":"level"":name"30:"crucible"[2] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active Active New ds_transition to WaitActive",",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.711212279Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2880 ,{,"""timetime":msg""::"""[1] Transition from Active to Faulted"2023-09-22T23:14:58.711219777Z"2023-09-22T23:14:58.711225807Z",",","hostnamev"hostname"":::0"","name":"test live_repair::repair_test::test_repair_dep_cleanup_repair ... ip-10-150-1-55.us-west-2.compute.internalcrucible"ok,
2881 "level":"30,ip-10-150-1-55.us-west-2.compute.internal""pid",:"4759pid":4759}
2882 }
2883 {,"time":""2023-09-22T23:14:58.711283829Z"{msg",":"hostname"":msg"":"Crucible 0a679673-3132-4061-840f-f267f0742c60 has session id: fde1a8d3-ce20-48f0-916c-50cd4aa6658c"ip-10-150-1-55.us-west-2.compute.internal",[2] Transition from New to WaitActive"",v,""":pid"0:v4759",}":
2884 name"0:","crucible{"name":",""cruciblemsg"":","level"level"::3030[1] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.711347501Z",","time":"hostname":",2023-09-22T23:14:58.711347847Z""ip-10-150-1-55.us-west-2.compute.internal,time""":",hostname"2023-09-22T23:14:58.711355499Z""pid":",":hostname4759":"}ip-10-150-1-55.us-west-2.compute.internal"
2885 ,"ip-10-150-1-55.us-west-2.compute.internal"pid","{pid"::47594759"msg":}"
2886 }
2887 {[0] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) New New New ds_transition to WaitActive","v":"0msg"{:","name":"crucible"[1] Transition from Faulted to LiveRepairReady",",""level"v"::0msg",":"name":"30crucible","level":30[2] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time",:""time":"2023-09-22T23:14:58.711432992Z"2023-09-22T23:14:58.711435449Z",","hostname":"hostname":","ip-10-150-1-55.us-west-2.compute.internal"time","ip-10-150-1-55.us-west-2.compute.internal"pid":",:"2023-09-22T23:14:58.711443623Zpid"":4759,4759"}hostname}"
2888 :"
2889 ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759{"}
2890 msg":""msg":"{[0] Transition from New to WaitActive","v":[1] 4dc045ec-5a1e-410e-8896-79fc1c563313 (67ef3947-ce9c-400e-a753-67511f30592a) Active LiveRepairReady Active ds_transition to LiveRepair""0,msg":","v":name":""0crucible",[2] Transition from WaitActive to WaitQuorum"",",name":"level"crucible"",:v30""level"::030,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.711530899Z","time,"":hostname"":",2023-09-22T23:14:58.711535164Z""ip-10-150-1-55.us-west-2.compute.internal,"time"hostname":"":","ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.711539154Z,"pid"pid""::47594759,"}hostname
2891 ":"}
2892 ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759{"}msg
2893 "":msg":""{[1] Transition from LiveRepairReady to LiveRepair","v""[0] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) WaitActive New New ds_transition to WaitQuorum":msg":,""v":00,,""name":name":"[2] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active Active WaitQuorum ds_transition to Active"crucible",",crucible"","level":level"30:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.711623661Z",",hostname":""time":ip-10-150-1-55.us-west-2.compute.internal"","pid":47592023-09-22T23:14:58.711622339Z",},""
2894 hostname"time":"{:"2023-09-22T23:14:58.711629161Z""msg":"ip-10-150-1-55.us-west-2.compute.internal",[0] Transition from WaitActive to WaitQuorum"","hostnamev",":pid"":0:"4759,"name"ip-10-150-1-55.us-west-2.compute.internal:"}crucible"
2895 ",","level":pid"30{:{4759"}msg
2896 ":""msg",:""time"[1] 0 final dependency list []":"{Crucible stats registered with UUID: 7670fa61-da3c-40aa-a375-f47901b91c30"2023-09-22T23:14:58.711682974Z",",,""v"vmsg":""hostname:"0:,"""nameip-10-150-1-55.us-west-2.compute.internal"":",[2] Transition from WaitQuorum to Active""cruciblepid"":,4759",:}level
2897 "0",{:"30"name"v"::msg":"0","crucible"name",:""level"crucible[0] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) WaitQuorum New New ds_transition to Active"":,,""v,"":time0",":name""level"2023-09-22T23:14:58.711728766Z:""30:crucible,"",hostname""30level:"":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2898 ,"time":"{,"2023-09-22T23:14:58.711760623Z""time",msg"":hostname""::"","Crucible 7670fa61-da3c-40aa-a375-f47901b91c30 has session id: d9c136ec-387e-4ef7-9b2c-d54c0a404272ip-10-150-1-55.us-west-2.compute.internal"","time"2023-09-22T23:14:58.711755674Z":v,"":pid0",:"4759name"}:
2899 ""crucible"{,2023-09-22T23:14:58.711760012Z",msg""level:"":"30[0] Transition from WaitQuorum to Active","",hostname""hostname"::""v":,0",time""name:"":ip-10-150-1-55.us-west-2.compute.internal"",crucible"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:14:58.711805485Z,"","pid"",hostnamelevel""::"30:"4759ip-10-150-1-55.us-west-2.compute.internal"pid",",:"pid,"":time4759":}"
2900 2023-09-22T23:14:58.711830957Z"4759{,"":}hostname"":"msg":""
2901 ip-10-150-1-55.us-west-2.compute.internal[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) New New New ds_transition to WaitActive"",downstairs",""pidv""::4759{}0}
2902 ",
2903 "msg{{name""":msg"":crucible""",msg"":level:"[1] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active New New ds_transition to WaitActive"":,30""v":08ee74e9c-4380-4bf2-913b-9825b6e2923a is now active with session: 7c4ead5b-3477-4337-95df-a84bbad26641[1] 0 final dependency list [JobId(1006)]"",","namev":,""",:crucibletime"",:""level"":v"302023-09-22T23:14:58.711891071Z":,"00hostname,",":"name"":"name"crucible":,"",ip-10-150-1-55.us-west-2.compute.internal""time,"":"pid":level47592023-09-22T23:14:58.711909345Z"}crucible"":
2904 ,30","hostname{level""::""msg":30"ip-10-150-1-55.us-west-2.compute.internal","pid":[0] Transition from New to WaitActive"4759,"}v
2905 ":0,"{name":""crucible"msg":,""level":30[1] Transition from New to WaitActive","v":0,",name"":time"":"crucible","2023-09-22T23:14:58.711942859Z,level",:"30time":"""2023-09-22T23:14:58.711958688Z"time",,":hostname,""hostnametime"""::"":""2023-09-22T23:14:58.711971119Z"ip-10-150-1-55.us-west-2.compute.internal",","hostnamepid""::"4759ip-10-150-1-55.us-west-2.compute.internal"}2023-09-22T23:14:58.711949094Zip-10-150-1-55.us-west-2.compute.internal
2906 ",,{"""pidmsg""::4759",}""pid"hostname":"[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) WaitActive New New ds_transition to WaitQuorum
2907 "ip-10-150-1-55.us-west-2.compute.internal"{,":,"4759pid":"vmsg""::0",4759,""name"}"[1] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active WaitActive New ds_transition to WaitQuorum:"",crucible""v,"":
2908 :level0",:"30name":"{crucible","level":30""downstairs"msg":"}
2909 ,,""timetime""::""[0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Active Active Active ds_transition to Faulted"{2023-09-22T23:14:58.712042969Z2023-09-22T23:14:58.712039818Z,""""v",,"":hostnamehostname""::""msg":"0ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759[1] 0 final dependency list [JobId(1007), JobId(1006)]"}}
2910 
2911 {,"{,"""msgmsg""::""v":name0[0] Transition from WaitActive to WaitQuorum[1] Transition from WaitActive to WaitQuorum"",,"""v,"":v0",:"0name":",name""name:"":crucible"crucible"crucible"",,""level"level:"30::,""30level":crucible"30,"level":30,"time":",2023-09-22T23:14:58.712112894Z""time",:""hostname":"2023-09-22T23:14:58.712116124Z","hostnameip-10-150-1-55.us-west-2.compute.internal"":","pid":4759,"}ip-10-150-1-55.us-west-2.compute.internal
2912 "time,":"{"pid2023-09-22T23:14:58.712118652Z""":msg4759",",}:""
2913 hostname":"{time":ip-10-150-1-55.us-west-2.compute.internal[1] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active WaitQuorum New ds_transition to Active""msg":,""v"":"02023-09-22T23:14:58.712121534Z,,"[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) WaitQuorum New New ds_transition to Active"name,"":v"":"0crucible,,""pid":name"",:""4759hostname"level"":crucible:"30,"","level"":"ip-10-150-1-55.us-west-2.compute.internal"downstairs",:,30""time}"
2914 :,""pid"time2023-09-22T23:14:58.712176902Z"":,"":hostname4759"2023-09-22T23:14:58.712186174Z:"","}hostname"ip-10-150-1-55.us-west-2.compute.internal:"","
2915 pidip-10-150-1-55.us-west-2.compute.internal"":,"4759pid":}4759
2916 }{
2917 {""{msg":""msg"msg[1] Transition from WaitQuorum to Active"":":","[0] Transition from WaitQuorum to Activev"":,0[0] Transition from Active to Faulted",""v,"name"":v"0:,"":namecrucible0"":",",cruciblename":"""crucible,level"":level30":"30,"level":30,",time"":time"":"2023-09-22T23:14:58.712242754Z"2023-09-22T23:14:58.712244432Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal""pid,"":pid4759":4759}
2918 },"
2919 {time":{"""msg"msg:"":"2023-09-22T23:14:58.712246859Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"[1] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active New New ds_transition to WaitActive[2] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active Active New ds_transition to WaitActive"",,""vv""::00,,""namename""::""crucible"crucible,"",level"":level30"::304759}
2920 ,","time"time:"":"{2023-09-22T23:14:58.712291046Z2023-09-22T23:14:58.712292787Z"",,""hostnamehostname""::"""msg":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:":47594759}}
2921 
2922 {[0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) Faulted Active Active ds_transition to LiveRepairReady"{",msg"""msgv:"":""[1] Transition from New to WaitActive":,"[2] Transition from New to WaitActivev"",:"0v,"":00name,"":name"":"crucible"crucible,"",,level""level"::3030"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:58.712338785Z2023-09-22T23:14:58.712339025Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
2923 
2924 {,"time"{msg":"""msg:"":"2023-09-22T23:14:58.712343592Z","[2] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active Active WaitActive ds_transition to WaitQuorumhostname"":"[1] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active WaitActive New ds_transition to WaitQuorum,""v,"":v"0:,0","namename""::""crucible"crucible,""level,""level:"30:ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4759}
2925 ,,""timetime""::""{2023-09-22T23:14:58.712392492Z2023-09-22T23:14:58.712390748Z"",,""hostname"hostname:"":""msg":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759[0] Transition from Faulted to LiveRepairReady"}}
2926 
2927 {test live_repair::repair_test::test_repair_dep_cleanup_some ... ,"{msg":""msg"ok
2928 [2] Transition from WaitActive to WaitQuorum:""",v"[1] Transition from WaitActive to WaitQuorum""v",:"0v,"":name0",:""name":crucible:"","crucible"level,"":level30"0:30,"name":"crucible","level":30,","time"time:"":"2023-09-22T23:14:58.712447646Z"2023-09-22T23:14:58.712449327Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47594759}}
2929 
2930 {,"{"time"msgmsg""::""":"2023-09-22T23:14:58.712453911Z","hostname":"[1] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active WaitQuorum New ds_transition to Active[2] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active Active WaitQuorum ds_transition to Active""ip-10-150-1-55.us-west-2.compute.internal",,""vv":"0:,0",,name""name:"":"crucible"crucible"",,pid"""levellevel""::3030:4759}
2931 ,,""timetime""::""{2023-09-22T23:14:58.712500134Z2023-09-22T23:14:58.712499968Z"",,""hostnamehostname"""::""msg":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
2932 
2933 {[0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) LiveRepairReady Active Active ds_transition to LiveRepair"{",msg""msg:"":""v":[1] Transition from WaitQuorum to Active"[2] Transition from WaitQuorum to Active",0,""vv""::00,,""name":name"":crucible"",crucible""level,"":level30":,30"name":"crucible","level":30,"time",:""time":"2023-09-22T23:14:58.712550309Z",2023-09-22T23:14:58.712552113Z"",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid",:"4759pid":}4759
2934 }
2935 {,{"""msgmsg""::""time":"2023-09-22T23:14:58.71255665Z","hostname":"0a679673-3132-4061-840f-f267f0742c60 is now active with session: 30d6992e-fe0e-4614-b657-ff6d5d0c30d2"[2] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active Active New ds_transition to WaitActive",","ip-10-150-1-55.us-west-2.compute.internal"vv""::00,,""name"name:"":"crucible"crucible,"",,level""level:"30:"30pid":4759}
2936 ,,""timetime""::""{2023-09-22T23:14:58.712599825Z2023-09-22T23:14:58.712598427Z"",,""hostname"hostname:""":"msg":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:[0] Transition from LiveRepairReady to LiveRepair"4759":,}4759
2937 }"
2938 {v":{"0msg""msg:"":,""name":"crucible"[2] Transition from New to WaitActive",",level[0] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Active Active Active ds_transition to Faulted""v",":v0",:"0",name"":name:"":"crucible"crucible,"","level"level:"30:3030,,""timetime""::""2023-09-22T23:14:58.712650299Z2023-09-22T23:14:58.712651435Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:,}4759
2939 }
2940 {"time""msg{":"":"msg":"2023-09-22T23:14:58.71265145Z","[0] Transition from Active to Faultedhostname":"","[2] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active Active WaitActive ds_transition to WaitQuorumv"":,0"ip-10-150-1-55.us-west-2.compute.internal"v,"":,"name0",:""name"crucible:"",pid"crucible"":,level"":level"30:475930}
2941 ,,""timetime""::""2023-09-22T23:14:58.712700845Z"2023-09-22T23:14:58.712702165Z",,""{hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759"}}
2942 
2943 {msg":""{msg":""Waiting for Close + ReOpen jobs"msg":","v":0,"[2] Transition from WaitActive to WaitQuorum"name":"[0] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) Faulted Active Active ds_transition to LiveRepairReady,"",v""v:"0:,0","namename""::""crucible"cruciblecrucible"",,""levellevel""::3030,"level":30,,""timetime""::""2023-09-22T23:14:58.712757323Z2023-09-22T23:14:58.712757166Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47594759},}
2944 
2945 {"time"{msg""":msg""::""2023-09-22T23:14:58.712759762Z"[0] Transition from Faulted to LiveRepairReady",,[2] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active Active WaitQuorum ds_transition to Active""v",:"0v,":"0",name"hostname""name:""::crucible"""crucible,"","level"level:"30:ip-10-150-1-55.us-west-2.compute.internal"30,"{pid":4759}
2946 ,,""time"time:""{":2023-09-22T23:14:58.712809087Z""msg",2023-09-22T23:14:58.712807284Z""",hostname":"hostname:""msg":"":"Crucible stats registered with UUID: 8004c607-ee2d-4e41-b83c-b23601ad76a4"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759,RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]"}},"
2947 
2948 {"v""{msg":"":msg[2] Transition from WaitQuorum to Active"":,""0vv":,0",[0] 0a679673-3132-4061-840f-f267f0742c60 (30d6992e-fe0e-4614-b657-ff6d5d0c30d2) LiveRepairReady Active Active ds_transition to LiveRepair""name,"":v""name"crucible:"0,,"":""namelevel""::"30crucible":crucible",0",",level"level"":,:"30time"30:name"":"crucible"2023-09-22T23:14:58.712878543Z",","level",hostname""time:"":":ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.712890126Z"",",pid"":30hostname4759":"}
2949 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
2950 ",msg{":"""msg":time"":"7670fa61-da3c-40aa-a375-f47901b91c30 is now active with session: 6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a"2023-09-22T23:14:58.712892122Z,[0] Transition from LiveRepairReady to LiveRepair""v,"":v0,,"":name0",:""name"crucible:"",""levelcrucible"":,30","level"time":":"hostname":"2023-09-22T23:14:58.71290749Z",30"time,ip-10-150-1-55.us-west-2.compute.internal"",":hostname"":""pid"2023-09-22T23:14:58.712937731Z,"","timeip-10-150-1-55.us-west-2.compute.internalhostname""::"":"2023-09-22T23:14:58.712947122Z"ip-10-150-1-55.us-west-2.compute.internal,"",,hostname"""pid:pid"4759""::4759}ip-10-150-1-55.us-west-2.compute.internal}"
2951 ,"
2952 4759{pid":4759}}"
2953 msg":{"{
2954 "[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Active Active Active ds_transition to Faultedmsg""",:"msg"{:v""":Waiting for Close + ReOpen jobs0",",name""v":Crucible 8004c607-ee2d-4e41-b83c-b23601ad76a4 has session id: aebb10c0-c584-4415-ac78-481406e247a9""":crucible0",,""namelevel""msg::,"""30vcrucible"":":,"0level",":,30"RE:0 close id:1000 queued, notify DS"time"name:",""2023-09-22T23:14:58.713021333Z":"v"crucible",,""time"hostname:"":",":ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:14:58.713030212Z"",",level0""pidhostname""::"4759:,}
2955 ip-10-150-1-55.us-west-2.compute.internal"30"{,name":"""crucible"pidmsg""::"4759,"}level"
2956 [0] Transition from Active to Faulted":,{"30"v"msg:":0","name":"crucible",RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]""level,"":v30":,"0,time"":name"":"crucible"2023-09-22T23:14:58.713062633Z",,"",leveltime""::30"",hostname"2023-09-22T23:14:58.713080863Z:"",""time":ip-10-150-1-55.us-west-2.compute.internal"","hostname2023-09-22T23:14:58.713075992Z",:"pid"time"":""ip-10-150-1-55.us-west-2.compute.internal":2023-09-22T23:14:58.713093688Z,"",pid"",hostname:"4759:"}"
2957 ip-10-150-1-55.us-west-2.compute.internal"4759,{"hostname":"}"pidmsg"
2958 "::"4759ip-10-150-1-55.us-west-2.compute.internal"}
2959 {,"pid{[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) Faulted Active Active ds_transition to LiveRepairReady"",:"""vmsg"":msg"4759"::0",RE:0 close id:1000 queued, notify DS""name",:""v"crucible:}0",,""
2960 [0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) New New New ds_transition to WaitActive"levelname""::30,""{crucible"v",":level0"":,30",time":""msg"2023-09-22T23:14:58.713176649Zname"",:",""crucible"hostnametime",":"":":"level":2023-09-22T23:14:58.713186436Zip-10-150-1-55.us-west-2.compute.internal30RE:0 Wait for result from close command 1000:1""",,""pid"hostname:"4759:",}"
2961 ip-10-150-1-55.us-west-2.compute.internal"v":{,0"",pidmsg""":,:4759"name"}[0] Transition from Faulted to LiveRepairReady"
2962 ,""{time":""vmsg""::0",2023-09-22T23:14:58.713206207Z"RE:0 Wait for result from close command 1000:1name""",":,"":cruciblehostname":"v,"":level0",:"30""name"crucible":ip-10-150-1-55.us-west-2.compute.internal",,""pid":"4759level"}:
2963 ,crucible""30,time""level:""{:302023-09-22T23:14:58.713252789Z",""msghostname"":":"[0] Transition from New to WaitActive","v":0ip-10-150-1-55.us-west-2.compute.internal",",name":",""pidtime""::4759"crucible"}2023-09-22T23:14:58.713273236Z
2964 ",,,{""""hostnamemsg""::""level":time"30:ip-10-150-1-55.us-west-2.compute.internal"",[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) LiveRepairReady Active Active ds_transition to LiveRepair""pid",:"4759v"}:
2965 0,"2023-09-22T23:14:58.713270314Ztime":,""name":"2023-09-22T23:14:58.713303832Z"crucible"",",,level":"30hostname":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}ip-10-150-1-55.us-west-2.compute.internal"
2966 ,"pid{,""time":"msg":"2023-09-22T23:14:58.713325598Z"":,"4759hostname":"[0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) WaitActive New New ds_transition to WaitQuorum"},"
2967 ip-10-150-1-55.us-west-2.compute.internal"v":,"0pid":,"4759name":"}crucible"
2968 ,"level":30{"msg":"[0] Transition from LiveRepairReady to LiveRepair","v":0,"name":","crucible"time":","level":2023-09-22T23:14:58.713363012Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2969 {"msg":,""time":"[0] Transition from WaitActive to WaitQuorum"2023-09-22T23:14:58.713374295Z",","v"hostname":":0,"name":"ip-10-150-1-55.us-west-2.compute.internalcrucible"",,""pid":level":304759}
2970 {"msg":"Waiting for Close + ReOpen jobs",,""time":"v":0,"2023-09-22T23:14:58.713397479Z"name,"":"hostname"crucible":","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2971 {"msg":","time":"2023-09-22T23:14:58.713414198Z","hostname":"[0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) WaitQuorum New New ds_transition to Active","v"ip-10-150-1-55.us-west-2.compute.internal":,0","pid":name":"4759crucible"},"
2972 level":30{"msg":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:14:58.713435539Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
2973 ,"time":"{2023-09-22T23:14:58.713443103Z""msg":","hostname":"[0] Transition from WaitQuorum to Active","v"ip-10-150-1-55.us-west-2.compute.internal":,"0pid":,"4759name":"}crucible"
2974 ,"level":30{"msg":"RE:0 close id:1000 queued, notify DS","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.713467356Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759},"
2975 time":"{2023-09-22T23:14:58.713472743Z"",msg":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active New New ds_transition to WaitActive"}
2976 ,"v":0{,"name"":"msg"crucible:"","level":30RE:0 Wait for result from close command 1000:1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.713498343Z","hostname":","time":ip-10-150-1-55.us-west-2.compute.internal"","pid":47592023-09-22T23:14:58.713501613Z"}
2977 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pidmsg":"":4759}
2978 [1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:58.71353096Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
29792023-09-22T23:14:58.713ZINFOcrucible: [1] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active WaitActive New ds_transition to WaitQuorum
29802023-09-22T23:14:58.713ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
29812023-09-22T23:14:58.713ZINFOcrucible: [1] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active WaitQuorum New ds_transition to Active
29822023-09-22T23:14:58.713ZINFOcrucible: [1] Transition from WaitQuorum to Active
29832023-09-22T23:14:58.713ZINFOcrucible: [2] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active Active New ds_transition to WaitActive
29842023-09-22T23:14:58.713ZINFOcrucible: [2] Transition from New to WaitActive
29852023-09-22T23:14:58.713ZINFOcrucible: [2] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active Active WaitActive ds_transition to WaitQuorum
29862023-09-22T23:14:58.713ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
29872023-09-22T23:14:58.713ZINFOcrucible: [2] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active Active WaitQuorum ds_transition to Active
29882023-09-22T23:14:58.713ZINFOcrucible: [2] Transition from WaitQuorum to Active
29892023-09-22T23:14:58.713ZINFOcrucible: 8004c607-ee2d-4e41-b83c-b23601ad76a4 is now active with session: a534a5bb-c489-4ad8-9603-6af8970f56bd
29902023-09-22T23:14:58.713ZINFOcrucible: [0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Active Active Active ds_transition to Faulted
29912023-09-22T23:14:58.713ZINFOcrucible: [0] Transition from Active to Faulted
29922023-09-22T23:14:58.713ZINFOcrucible: [0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) Faulted Active Active ds_transition to LiveRepairReady
29932023-09-22T23:14:58.713ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
29942023-09-22T23:14:58.713ZINFOcrucible: [0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) LiveRepairReady Active Active ds_transition to LiveRepair
29952023-09-22T23:14:58.713ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
29962023-09-22T23:14:58.713ZINFOcrucible: Waiting for Close + ReOpen jobs
29972023-09-22T23:14:58.713ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
29982023-09-22T23:14:58.713ZINFOcrucible: RE:0 close id:1000 queued, notify DS
29992023-09-22T23:14:58.713ZINFOcrucible: RE:0 Wait for result from close command 1000:1
30002023-09-22T23:14:59.578ZINFOcrucible: Checking if live repair is needed upstairs = 1
30012023-09-22T23:14:59.578ZINFOcrucible: No Live Repair required at this time upstairs = 1
30022023-09-22T23:14:59.580ZINFOcrucible: Checking if live repair is needed upstairs = 1
30032023-09-22T23:14:59.580ZINFOcrucible: No Live Repair required at this time upstairs = 1
30042023-09-22T23:14:59.581ZINFOcrucible: Checking if live repair is needed upstairs = 1
30052023-09-22T23:14:59.581ZINFOcrucible: No Live Repair required at this time upstairs = 1
30062023-09-22T23:14:59.582ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a looper connected looper = 0 upstairs = 1
30072023-09-22T23:14:59.582ZINFOcrucible: [0] Proc runs for 127.0.0.1:36921 in state Offline upstairs = 1
30082023-09-22T23:14:59.582ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs upstairs = 1
30092023-09-22T23:14:59.582ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
30102023-09-22T23:14:59.582ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session cc12d06e-bd83-45fe-987f-779d8c6339ec upstairs = 1
30112023-09-22T23:14:59.582ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 04482c5c-f564-49f3-897c-51c4de58239a, session_id: cc12d06e-bd83-45fe-987f-779d8c6339ec, gen: 1 } downstairs = 1
30122023-09-22T23:14:59.583ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
30132023-09-22T23:14:59.583ZINFOcrucible: [0] downstairs client at 127.0.0.1:36921 has UUID d057f0d7-1fef-421b-b789-cb2746bfdb26 upstairs = 1
30142023-09-22T23:14:59.583ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: d057f0d7-1fef-421b-b789-cb2746bfdb26, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
30152023-09-22T23:14:59.583ZINFOcrucible: Returning client:0 UUID:d057f0d7-1fef-421b-b789-cb2746bfdb26 matches upstairs = 1
3016 waiting for ds1 message in test_replay_occurs
30172023-09-22T23:14:59.583ZINFOcrucible: [0] send last flush ID to this DS: 0 upstairs = 1
30182023-09-22T23:14:59.583ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a Offline Active Active upstairs = 1
30192023-09-22T23:14:59.583ZINFOcrucible: negotiate packet LastFlush { last_flush_number: JobId(0) } downstairs = 1
30202023-09-22T23:14:59.583ZINFOcrucible: [0] Replied this last flush ID: 0 upstairs = 1
30212023-09-22T23:14:59.583ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) Offline Active Active ds_transition to Replay upstairs = 1
30222023-09-22T23:14:59.583ZINFOcrucible: [0] Transition from Offline to Replay upstairs = 1
30232023-09-22T23:14:59.583ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a Transition from Replay to Active upstairs = 1
30242023-09-22T23:14:59.583ZINFOcrucible: [0] 04482c5c-f564-49f3-897c-51c4de58239a (cc12d06e-bd83-45fe-987f-779d8c6339ec) Replay Active Active ds_transition to Active upstairs = 1
30252023-09-22T23:14:59.583ZINFOcrucible: [0] Transition from Replay to Active upstairs = 1
30262023-09-22T23:14:59.583ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
30272023-09-22T23:14:59.583ZINFOcrucible: [0] 127.0.0.1:36921 task reports connection:true upstairs = 1
30282023-09-22T23:14:59.583ZINFOcrucible: 04482c5c-f564-49f3-897c-51c4de58239a Active Active Active upstairs = 1
30292023-09-22T23:14:59.583ZINFOcrucible: Set check for repair upstairs = 1
30302023-09-22T23:14:59.588ZINFOcrucible: Checking if live repair is needed upstairs = 1
30312023-09-22T23:14:59.588ZINFOcrucible: No Live Repair required at this time upstairs = 1
30322023-09-22T23:14:59.711ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
30332023-09-22T23:14:59.711ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
30342023-09-22T23:14:59.711ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
30352023-09-22T23:14:59.711ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
30362023-09-22T23:14:59.711ZINFOcrucible: [0] 8ee74e9c-4380-4bf2-913b-9825b6e2923a (7c4ead5b-3477-4337-95df-a84bbad26641) LiveRepair Active Active ds_transition to Faulted
30372023-09-22T23:14:59.711ZINFOcrucible: [0] Transition from LiveRepair to Faulted
30382023-09-22T23:14:59.711ZINFOcrucible: Now ACK the close job
30392023-09-22T23:14:59.711ZINFOcrucible: Waiting for 3 jobs (currently 2)
30402023-09-22T23:14:59.711ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
30412023-09-22T23:14:59.711ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
30422023-09-22T23:14:59.712ZINFOcrucible: Now ACK the close job
30432023-09-22T23:14:59.712ZINFOcrucible: Waiting for 3 jobs (currently 2)
30442023-09-22T23:14:59.712ZINFOcrucible: Repair for extent 0 s:1 d:[ClientId(0)] = downstairs
30452023-09-22T23:14:59.712ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
30462023-09-22T23:14:59.713ZINFOcrucible: Waiting for 3 jobs (currently 2)
30472023-09-22T23:14:59.713ZINFOcrucible: No repair needed for extent 0 = downstairs
30482023-09-22T23:14:59.713ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
30492023-09-22T23:14:59.714ZINFOcrucible: Waiting for 3 jobs (currently 2)
30502023-09-22T23:14:59.714ZINFOcrucible: No repair needed for extent 0 = downstairs
30512023-09-22T23:14:59.714ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
30522023-09-22T23:14:59.817ZWARNcrucible: [2] flow control start upstairs = 1
30532023-09-22T23:14:59.817ZWARNcrucible: [0] flow control start upstairs = 1
30542023-09-22T23:14:59.817ZWARNcrucible: [1] flow control start upstairs = 1
3055 waiting for ds1 message in test_replay_occurs
30562023-09-22T23:15:00.585ZWARNcrucible: [0] flow control end upstairs = 1
30572023-09-22T23:15:00.585ZINFOcrucible: Checking if live repair is needed upstairs = 1
30582023-09-22T23:15:00.585ZINFOcrucible: No Live Repair required at this time upstairs = 1
30592023-09-22T23:15:00.712ZINFOcrucible: Waiting for 4 jobs (currently 3)
30602023-09-22T23:15:00.712ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
30612023-09-22T23:15:00.712ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
30622023-09-22T23:15:00.713ZINFOcrucible: Now ACK the repair job
30632023-09-22T23:15:00.713ZINFOcrucible: Waiting for 4 jobs (currently 3)
30642023-09-22T23:15:00.713ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
30652023-09-22T23:15:00.713ZINFOcrucible: Waiting for 4 jobs (currently 3)
30662023-09-22T23:15:00.713ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
30672023-09-22T23:15:00.714ZINFOcrucible: Waiting for 4 jobs (currently 3)
30682023-09-22T23:15:00.715ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
30692023-09-22T23:15:00.817ZWARNcrucible: [0] flow control end upstairs = 1
30702023-09-22T23:15:00.817ZWARNcrucible: [2] flow control end upstairs = 1
30712023-09-22T23:15:00.819ZWARNcrucible: [1] flow control end upstairs = 1
3072 test dummy_downstairs_tests::protocol_test::test_replay_occurs ... ok
30732023-09-22T23:15:01.587ZINFOcrucible: Crucible stats registered with UUID: 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25
30742023-09-22T23:15:01.587ZINFOcrucible: Crucible 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 has session id: 05b118b3-d9d3-49db-ae4e-8cebbfe05ada
30752023-09-22T23:15:01.587ZINFOcrucible: [0] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) New New New ds_transition to WaitActive
30762023-09-22T23:15:01.587ZINFOcrucible: [0] Transition from New to WaitActive
30772023-09-22T23:15:01.587ZINFOcrucible: [0] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) WaitActive New New ds_transition to WaitQuorum
30782023-09-22T23:15:01.587ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
30792023-09-22T23:15:01.587ZINFOcrucible: [0] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) WaitQuorum New New ds_transition to Active
30802023-09-22T23:15:01.587ZINFOcrucible: [0] Transition from WaitQuorum to Active
30812023-09-22T23:15:01.587ZINFOcrucible: [1] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active New New ds_transition to WaitActive
30822023-09-22T23:15:01.587ZINFOcrucible: [1] Transition from New to WaitActive
30832023-09-22T23:15:01.587ZINFOcrucible: [1] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active WaitActive New ds_transition to WaitQuorum
30842023-09-22T23:15:01.587ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
30852023-09-22T23:15:01.587ZINFOcrucible: [1] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active WaitQuorum New ds_transition to Active
30862023-09-22T23:15:01.587ZINFOcrucible: [1] Transition from WaitQuorum to Active
30872023-09-22T23:15:01.587ZINFOcrucible: [2] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active Active New ds_transition to WaitActive
30882023-09-22T23:15:01.587ZINFOcrucible: [2] Transition from New to WaitActive
30892023-09-22T23:15:01.587ZINFOcrucible: [2] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active Active WaitActive ds_transition to WaitQuorum
30902023-09-22T23:15:01.587ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
30912023-09-22T23:15:01.587ZINFOcrucible: [2] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active Active WaitQuorum ds_transition to Active
30922023-09-22T23:15:01.587ZINFOcrucible: [2] Transition from WaitQuorum to Active
30932023-09-22T23:15:01.587ZINFOcrucible: 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 is now active with session: 34c41cf8-0433-4cf1-9c64-d266d05b21c9
30942023-09-22T23:15:01.587ZINFOcrucible: [0] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Active Active Active ds_transition to Faulted
30952023-09-22T23:15:01.587ZINFOcrucible: [0] Transition from Active to Faulted
30962023-09-22T23:15:01.587ZINFOcrucible: [0] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) Faulted Active Active ds_transition to LiveRepairReady
30972023-09-22T23:15:01.587ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
30982023-09-22T23:15:01.587ZINFOcrucible: [0] 3d04bbe2-2f14-4f9e-87aa-ab9edd65bb25 (34c41cf8-0433-4cf1-9c64-d266d05b21c9) LiveRepairReady Active Active ds_transition to LiveRepair
30992023-09-22T23:15:01.587ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
31002023-09-22T23:15:01.587ZINFOcrucible: Waiting for Close + ReOpen jobs
31012023-09-22T23:15:01.587ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
31022023-09-22T23:15:01.587ZINFOcrucible: RE:0 close id:1000 queued, notify DS
31032023-09-22T23:15:01.587ZINFOcrucible: RE:0 Wait for result from close command 1000:1
31042023-09-22T23:15:01.714ZINFOcrucible: Now move the NoOp job forward
31052023-09-22T23:15:01.714ZINFOcrucible: Now ACK the NoOp job
31062023-09-22T23:15:01.714ZINFOcrucible: Finally, move the ReOpen job forward
31072023-09-22T23:15:01.714ZINFOcrucible: Now ACK the Reopen job
31082023-09-22T23:15:01.714ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
31092023-09-22T23:15:01.714ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
31102023-09-22T23:15:01.714ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
31112023-09-22T23:15:01.714ZWARNcrucible: RE:0 Bailing with error
3112 ----------------------------------------------------------------
3113 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
3114 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3115 1 Acked 1000 FClose 0 Err Done Done false
3116 2 Acked 1001 NoOp 0 Skip Done Done false
3117 3 Acked 1002 NoOp 0 Skip Done Done false
3118 4 Acked 1003 Reopen 0 Skip Done Done false
3119 STATES DS:0 DS:1 DS:2 TOTAL
3120 New 0 0 0 0
3121 Sent 0 0 0 0
3122 Done 0 4 4 8
3123 Skipped 3 0 0 3
3124 Error 1 0 0 1
3125 Last Flush: 0 0 0
3126 Downstairs last five completed:
3127 Upstairs last five completed: 4 3 2 1
31282023-09-22T23:15:01.714ZINFOcrucible: Now move the NoOp job forward
31292023-09-22T23:15:01.714ZINFOcrucible: Now ACK the NoOp job
31302023-09-22T23:15:01.714ZINFOcrucible: Finally, move the ReOpen job forward
31312023-09-22T23:15:01.714ZINFOcrucible: Now ACK the repair job
31322023-09-22T23:15:01.714ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
31332023-09-22T23:15:01.715ZINFOcrucible: Crucible stats registered with UUID: ab17680f-7c63-4c2f-bd24-9eb69ab45861
3134 {"msg":"Crucible ab17680f-7c63-4c2f-bd24-9eb69ab45861 has session id: 6a03f4da-4cdc-4031-8c63-9ab921ddeafd","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.71509934Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",{"pid":4759}
3135 "msg":"{Now move the NoOp job forward","v""msg"::"0,"name":"crucible","level":[0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) New New New ds_transition to WaitActive30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.715147304Z","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:15:01.715141991Z":4759,"}hostname"
3136 :"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":"}
3137 [0] Transition from New to WaitActive","v":0,"name":"{crucible","level":30"msg":"[0] DS Reports error Err(GenericError(\"bad\","time":"2023-09-22T23:15:01.715198192Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759)) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"},
3138 {"msg":""[0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) WaitActive New New ds_transition to WaitQuorum"v",:"0v":,0","name"name:"":"crucible"crucible",","level"level"::5030,"time":","time2023-09-22T23:15:01.715269558Z"":","hostname":"2023-09-22T23:15:01.715269537Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",":"pid":4759ip-10-150-1-55.us-west-2.compute.internal"},"
3139 pid":4759,"":"{downstairs"}
3140 "msg":"[0] Transition from WaitActive to WaitQuorum","v":{0,"name":"crucible",""level":msg":30"[0] Reports error GenericError(\"bad\","time":") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"2023-09-22T23:15:01.715331264Z",",v"":hostname":"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible,""pid":,4759"level"}:
3141 50{"msg":"[0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,","time"time":":"2023-09-22T23:15:01.715377235Z","hostname":"2023-09-22T23:15:01.715367038Z",ip-10-150-1-55.us-west-2.compute.internal",""pid":4759hostname"}
3142 :"{"msg":"ip-10-150-1-55.us-west-2.compute.internal","[0] Transition from WaitQuorum to Active"pid",":v4759":0,"name,"":""crucible":,""level":30downstairs"}
3143 ,"time":"2023-09-22T23:15:01.715411867Z",{"hostname":"ip-10-150-1-55.us-west-2.compute.internal""msg,"":pid":"4759{}
3144 [0] client skip 4 in process jobs because fault","{"msg"v"":"msg":":0Crucible stats registered with UUID: dc10ceaf-6b5e-482c-b452-5b861e163a08","name",:"[1] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active New New ds_transition to WaitActive""v":,"cruciblev"",0:,0",""namenamelevel""":":crucible:""30,"crucible"level":,30"level":30,"time":"2023-09-22T23:15:01.715466099Z",",hostname"":"time":"ip-10-150-1-55.us-west-2.compute.internal","pid":47592023-09-22T23:15:01.715463483Z}"
3145 ,"{hostname":"",msg""ip-10-150-1-55.us-west-2.compute.internal":,time":"""pid"2023-09-22T23:15:01.715468097Z:[1] Transition from New to WaitActive"4759",",,"""hostname":":v":"ip-10-150-1-55.us-west-2.compute.internal"downstairs0,",""pid"name"::}4759
3146 }"
3147 crucible"{,"level":30{"msg":""msg":"[0] changed 1 jobs to fault skipped","v":Crucible dc10ceaf-6b5e-482c-b452-5b861e163a08 has session id: cf4f1db3-0dbf-449f-a8a4-2771f7de8587",0,","v":"timename0",":"name"":":"cruciblecrucible"",,"2023-09-22T23:15:01.715523004Z"level"",level""::hostname3030":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3148 {"msg":","time":"2023-09-22T23:15:01.715553249Z","hostname":"[1] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active WaitActive New ds_transition to WaitQuorum",ip-10-150-1-55.us-west-2.compute.internal",""time,""pid":v":4759:0},"
3149 "name":"crucible","level":2023-09-22T23:15:01.715553994Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,",time":""":"2023-09-22T23:15:01.715582233Z","downstairshostname"":"}
3150 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3151 {{"msg"":"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30[0] 7670fa61-da3c-40aa-a375-f47901b91c30 (6b3a8f2e-9943-4b3e-b0b5-d3cbb5e6085a) LiveRepair Active Active ds_transition to Faulted","v":0,"name":","crucible"time,":""level2023-09-22T23:15:01.715609802Z"",":hostname":30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3152 {"msg":"[1] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active WaitQuorum New ds_transition to Active","v":0,",name":""crucible","time":level":30"2023-09-22T23:15:01.715624882Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,""time,":""pid"2023-09-22T23:15:01.715641346Z":,"4759hostname":"}
3153 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3154 {{{"msg"":""msg":msg""[0] Transition from LiveRepair to Faulted":","v":[1] Transition from WaitQuorum to Active"[0] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) New New New ds_transition to WaitActive,"","vv":"0:00,,",""namename":"namecrucible"","":"levelcrucible"::"30,""levelcrucible"",":30level":30,","time":"time":"2023-09-22T23:15:01.715699718Z"2023-09-22T23:15:01.715703768Z,"","timehostname",":":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:15:01.715704791Zpid":ip-10-150-1-55.us-west-2.compute.internal4759"},""
3155 pid":{4759,"}"
3156 hostname"msg"::""{"msg":ip-10-150-1-55.us-west-2.compute.internal"[2] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active Active New ds_transition to WaitActive"",","[0] Transition from New to WaitActive"v"pid:,"0",":v":0name"4759:",}"crucible"
3157 ,"namelevel""::{"30crucible""msg,""level":30:","time":"Extent 0 close id:1002 Failed: Error: bad"2023-09-22T23:15:01.715774872Z",,""hostname":"v":ip-10-150-1-55.us-west-2.compute.internal"0,",",time":"pid":2023-09-22T23:15:01.715779088Z"4759,""}hostname
3158 name"":":{"ip-10-150-1-55.us-west-2.compute.internal"crucible"",msg",""pid"::"level"4759[2] Transition from New to WaitActive"},
3159 :"50v":{0,"name""msg":":"crucible","level":30[0] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level",:30,""time":"time":"2023-09-22T23:15:01.715827563Z","hostname":"2023-09-22T23:15:01.71582163Z","ip-10-150-1-55.us-west-2.compute.internal"hostname":,"","pidtimeip-10-150-1-55.us-west-2.compute.internal":"":"2023-09-22T23:15:01.715833811Z,4759"}","
3160 pid":{4759hostname":}""
3161 msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
3162 "[2] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active Active WaitActive ds_transition to WaitQuorummsg"":","{v":0RE:0 Wait for result from reopen command 1003:4",""name",:""cruciblemsg":"v""[0] Transition from WaitActive to WaitQuorum,"":level0",:30","name"v"::0","name":"crucible"crucible",,""time,""level":level"30:":302023-09-22T23:15:01.715891534Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3163 ,"time":"{2023-09-22T23:15:01.715908519Z"",msg":""hostname":"[2] Transition from WaitActive to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal",",v""pid"::04759,",}"nametime"":":crucible""
3164 ,"2023-09-22T23:15:01.71591104Z{"level"",:msg"":"hostname"30:"[0] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) WaitQuorum New New ds_transition to Active"ip-10-150-1-55.us-west-2.compute.internal",",v":0",",pidtime":"""name":2023-09-22T23:15:01.71594948Z"4759:","}crucible"hostname,
3165 "":"level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3166 {"msg":,""time":"2023-09-22T23:15:01.715975135Z","hostname":"[2] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active Active WaitQuorum ds_transition to Active"ip-10-150-1-55.us-west-2.compute.internal",","v":0,"pid":name":"4759crucible","}
3167 level":30{"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.715998161Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3168 ,"time":"{2023-09-22T23:15:01.716004311Z""msg,"":hostname":""ip-10-150-1-55.us-west-2.compute.internal"[2] Transition from WaitQuorum to Active,""pid":,"4759v":0},"
3169 name":"crucible"{,"level":"30msg":"[1] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active New New ds_transition to WaitActive","v":0,"name":"crucible",","leveltime":"":302023-09-22T23:15:01.716031775Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3170 ,"time{":""msg2023-09-22T23:15:01.716041155Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pidab17680f-7c63-4c2f-bd24-9eb69ab45861 is now active with session: 76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb"",":v":47590,"}name"
3171 :"crucible",{"level":"30msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.716066316Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}
3172 2023-09-22T23:15:01.716070815Z","hostname{":""msgip-10-150-1-55.us-west-2.compute.internal","":"pid":4759}
3173 [0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Active Active Active ds_transition to Faulted","v{":0,""name"msg":":"crucible","level":30[1] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.716101625Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",:4759}
31742023-09-22T23:15:01.716ZINFOcrucible: [0] Transition from Active to Faulted
3175 {"time":"2023-09-22T23:15:01.716106531Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3176 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":"0,msg"":"name":"crucible","level":30[0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) Faulted Active Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.716187678Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3177 ,"time":"{2023-09-22T23:15:01.716194511Z""msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active WaitQuorum New ds_transition to Active"},"
3178 v":0,"name":"crucible",{"level":30"msg":"[0] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.716220641Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3179 ,"time":"{"2023-09-22T23:15:01.716225701Zmsg":"","hostname":"[1] Transition from WaitQuorum to Active","v":0ip-10-150-1-55.us-west-2.compute.internal",","name":"pid"crucible":,"4759level":30}
3180 {"msg":","time":"2023-09-22T23:15:01.7162488Z"[0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) LiveRepairReady Active Active ds_transition to LiveRepair",","hostname":"v":0,"name":ip-10-150-1-55.us-west-2.compute.internal"",crucible"","pid":level":475930}
3181 {"msg":","time":"[2] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active Active New ds_transition to WaitActive"2023-09-22T23:15:01.716269728Z",","v":hostname0,"":"name":"crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3182 ,"time":"{2023-09-22T23:15:01.716287178Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3183 [0] Transition from LiveRepairReady to LiveRepair"{,"{"v"msg":":0","msgname""::""[2] Transition from New to WaitActive",crucibleNow move the NoOp job forward"","",level""v:"30:v":0,"0name":,""crucible"name":","cruciblelevel":"30,"level",:"30time":"2023-09-22T23:15:01.716316813Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:15:01.716326575Z"pid":,"4759hostname":"}
3184 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}{,
3185 ""msgtime""::""{"Waiting for Close + ReOpen jobs2023-09-22T23:15:01.716329012Z"","msg":",v"":0hostname",:""name":"crucible"[2] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active Active WaitActive ds_transition to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal,"",,""levelpid""::304759v":0,"}name
3186 ":"crucible","level":30,"time":"2023-09-22T23:15:01.716366173Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:15:01.716374616Zpid":"4759,"hostname}":"
3187 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}{
3188 "msg"{:{""msg":"[2] Transition from WaitActive to WaitQuorum"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]""msg",:""v",":[0] DS Reports error Err(GenericError(0\"v":,bad"\"name"0:,""name":"cruciblecrucible"",",level"":30level":30,"time":")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"2023-09-22T23:15:01.716416255Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal",v""time:"0:,",""pid":2023-09-22T23:15:01.716417375Zname"",:""4759hostnamecrucible"}
3189 ":","{level":ip-10-150-1-55.us-west-2.compute.internal"50,""pidmsg":"":4759}
3190 [2] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active Active WaitQuorum ds_transition to Active","v":0,"{,name":"""msg"time:"":"crucible","level"RE:0 close id:1000 queued, notify DS2023-09-22T23:15:01.716449167Z"":,,""30vhostname""::"0,"name":"ip-10-150-1-55.us-west-2.compute.internalcrucible"",","pid":level"4759:30,"":","downstairs"time":"}
3191 2023-09-22T23:15:01.716469711Z","hostname":"{,ip-10-150-1-55.us-west-2.compute.internal"","time""msg:"":"pid":47592023-09-22T23:15:01.716475181Z"[0] Reports error GenericError(,\""bad\"hostname"}
3192 :"{"msg":"ip-10-150-1-55.us-west-2.compute.internal","[2] Transition from WaitQuorum to Active"pid,"":v4759":0},"
3193 ) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"name":",{crucible"""vmsg""::0",,""level":nameRE:0 Wait for result from close command 1000:1":"",30crucible""v",:"0level,"":50name":"crucible","level":30,"time":"2023-09-22T23:15:01.716521778Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3194 ,,""{timetime""::"""msg":"2023-09-22T23:15:01.716527259Z2023-09-22T23:15:01.716524572Z"",,""hostnamehostname""::""dc10ceaf-6b5e-482c-b452-5b861e163a08 is now active with session: 1ac6e38b-5a66-4230-80ea-1ef440760337","v":0,"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759name":},
3195 """crucible":","level":downstairs"30}
3196 {"msg":","time":"2023-09-22T23:15:01.7165607Z"[0] client skip 4 in process jobs because fault",","hostname":"v":0ip-10-150-1-55.us-west-2.compute.internal",,""pid":name":4759"}
3197 crucible","{level":"30msg":"[1] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":","time":2023-09-22T23:15:01.716585008Z"","2023-09-22T23:15:01.7165915Z"hostname,""hostname":":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid":,"4759pid":}4759
3198 ,"":"{downstairs"}
3199 "msg":"{[1] Transition from Active to Faulted","v":"0msg":","name":"[0] changed 0 jobs to fault skipped"crucible",",v"":level":300,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.71662991Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:15:01.716632397Z"4759,"}hostname
3200 ":"{ip-10-150-1-55.us-west-2.compute.internal""msg":","pid":4759,"":"downstairs"}
3201 [1] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active Faulted Active ds_transition to LiveRepairReady","v{":0,"name":""cruciblemsg":"","level":30[0] 8004c607-ee2d-4e41-b83c-b23601ad76a4 (a534a5bb-c489-4ad8-9603-6af8970f56bd) LiveRepair Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.71666223Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3202 ,"time":"{"2023-09-22T23:15:01.716667638Z"msg":","hostname":"[1] Transition from Faulted to LiveRepairReady","vip-10-150-1-55.us-west-2.compute.internal"",":pid":04759,"}name":"
3203 crucible","level{":30"msg":"[0] Transition from LiveRepair to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.716693997Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3204 ,"time":"{2023-09-22T23:15:01.716699083Z"",msg":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3205 [1] dc10ceaf-6b5e-482c-b452-5b861e163a08 (1ac6e38b-5a66-4230-80ea-1ef440760337) Active LiveRepairReady Active ds_transition to LiveRepair","v":0{,"name":""crucible"msg",":"level":30RE:0 Wait for result from reopen command 1003:4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:01.716730138Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3206 ,"time":"{2023-09-22T23:15:01.716734306Z""msg":","hostname":"[1] Transition from LiveRepairReady to LiveRepair","ip-10-150-1-55.us-west-2.compute.internal","v":pid":04759,"name"}:"
3207 crucible","level":{30"msg":"Extent 0 close id:1003 Failed: Error: bad","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:15:01.716761737Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3208 ,"time":"{2023-09-22T23:15:01.716767108Z"","msg"hostname":":"Waiting for Close + ReOpen jobs"ip-10-150-1-55.us-west-2.compute.internal,""v",":pid0",":name":"4759crucible","}level":30
3209 {"msg":"RE:0 Bailing with error","v":0,,""time":name"":"crucible"2023-09-22T23:15:01.716794196Z",,""level":40hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3210 {,""timemsg":"":"2023-09-22T23:15:01.716808713Z"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]",,""v":0hostname":","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",,""pid":level"4759:30}
3211 ,"time":"2023-09-22T23:15:01.716831263Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
32122023-09-22T23:15:01.716ZINFOcrucible: RE:0 close id:1000 queued, notify DS
32132023-09-22T23:15:01.716ZINFOcrucible: RE:0 Wait for result from close command 1000:1
32142023-09-22T23:15:01.717ZINFOcrucible: Crucible stats registered with UUID: c96f066c-48e7-437e-b4d0-bca7db257ca7
32152023-09-22T23:15:01.717ZINFOcrucible: Crucible c96f066c-48e7-437e-b4d0-bca7db257ca7 has session id: 2d89a9cc-9693-4b98-b14a-70cd38065182
32162023-09-22T23:15:01.717ZINFOcrucible: [0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) New New New ds_transition to WaitActive
32172023-09-22T23:15:01.717ZINFOcrucible: [0] Transition from New to WaitActive
32182023-09-22T23:15:01.717ZINFOcrucible: [0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) WaitActive New New ds_transition to WaitQuorum
32192023-09-22T23:15:01.717ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
32202023-09-22T23:15:01.717ZINFOcrucible: [0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) WaitQuorum New New ds_transition to Active
32212023-09-22T23:15:01.717ZINFOcrucible: [0] Transition from WaitQuorum to Active
32222023-09-22T23:15:01.717ZINFOcrucible: [1] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active New New ds_transition to WaitActive
32232023-09-22T23:15:01.717ZINFOcrucible: [1] Transition from New to WaitActive
32242023-09-22T23:15:01.717ZINFOcrucible: [1] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active WaitActive New ds_transition to WaitQuorum
32252023-09-22T23:15:01.717ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
32262023-09-22T23:15:01.717ZINFOcrucible: [1] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active WaitQuorum New ds_transition to Active
32272023-09-22T23:15:01.717ZINFOcrucible: [1] Transition from WaitQuorum to Active
32282023-09-22T23:15:01.717ZINFOcrucible: [2] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active Active New ds_transition to WaitActive
32292023-09-22T23:15:01.717ZINFOcrucible: [2] Transition from New to WaitActive
32302023-09-22T23:15:01.717ZINFOcrucible: [2] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active Active WaitActive ds_transition to WaitQuorum
32312023-09-22T23:15:01.717ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
32322023-09-22T23:15:01.717ZINFOcrucible: [2] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active Active WaitQuorum ds_transition to Active
32332023-09-22T23:15:01.717ZINFOcrucible: [2] Transition from WaitQuorum to Active
32342023-09-22T23:15:01.717ZINFOcrucible: c96f066c-48e7-437e-b4d0-bca7db257ca7 is now active with session: 777a2755-8455-4e73-a9be-625addff6b55
32352023-09-22T23:15:01.717ZINFOcrucible: [0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Active Active Active ds_transition to Faulted
32362023-09-22T23:15:01.717ZINFOcrucible: [0] Transition from Active to Faulted
32372023-09-22T23:15:01.717ZINFOcrucible: [0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) Faulted Active Active ds_transition to LiveRepairReady
32382023-09-22T23:15:01.717ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
32392023-09-22T23:15:01.717ZINFOcrucible: [0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) LiveRepairReady Active Active ds_transition to LiveRepair
32402023-09-22T23:15:01.717ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
32412023-09-22T23:15:01.717ZINFOcrucible: Waiting for Close + ReOpen jobs
32422023-09-22T23:15:01.717ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
32432023-09-22T23:15:01.717ZINFOcrucible: RE:0 close id:1000 queued, notify DS
32442023-09-22T23:15:01.717ZINFOcrucible: RE:0 Wait for result from close command 1000:1
32452023-09-22T23:15:01.787ZWARNcrucible: [0] flow control start upstairs = 1
3246 test dummy_downstairs_tests::protocol_test::test_flow_control ... ok
32472023-09-22T23:15:01.843ZINFOcrucible: Crucible stats registered with UUID: 208c6b08-32db-4498-a111-6f48aa07c8ef
32482023-09-22T23:15:01.843ZINFOcrucible: Crucible 208c6b08-32db-4498-a111-6f48aa07c8ef has session id: df19bc5a-b192-411a-a6e4-6b07040e819d
32492023-09-22T23:15:01.843ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) New New New ds_transition to WaitActive
32502023-09-22T23:15:01.843ZINFOcrucible: [0] Transition from New to WaitActive
32512023-09-22T23:15:01.843ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) WaitActive New New ds_transition to WaitQuorum
32522023-09-22T23:15:01.843ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
32532023-09-22T23:15:01.843ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) WaitQuorum New New ds_transition to Active
32542023-09-22T23:15:01.843ZINFOcrucible: [0] Transition from WaitQuorum to Active
32552023-09-22T23:15:01.843ZINFOcrucible: [1] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active New New ds_transition to WaitActive
32562023-09-22T23:15:01.843ZINFOcrucible: [1] Transition from New to WaitActive
32572023-09-22T23:15:01.843ZINFOcrucible: [1] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active WaitActive New ds_transition to WaitQuorum
32582023-09-22T23:15:01.844ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
32592023-09-22T23:15:01.844ZINFOcrucible: [1] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active WaitQuorum New ds_transition to Active
32602023-09-22T23:15:01.844ZINFOcrucible: [1] Transition from WaitQuorum to Active
32612023-09-22T23:15:01.844ZINFOcrucible: [2] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active Active New ds_transition to WaitActive
32622023-09-22T23:15:01.844ZINFOcrucible: [2] Transition from New to WaitActive
32632023-09-22T23:15:01.844ZINFOcrucible: [2] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active Active WaitActive ds_transition to WaitQuorum
32642023-09-22T23:15:01.844ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
32652023-09-22T23:15:01.844ZINFOcrucible: [2] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active Active WaitQuorum ds_transition to Active
32662023-09-22T23:15:01.844ZINFOcrucible: [2] Transition from WaitQuorum to Active
32672023-09-22T23:15:01.844ZINFOcrucible: 208c6b08-32db-4498-a111-6f48aa07c8ef is now active with session: a9d99eb4-fe49-4eea-a3e5-bed72149e733
32682023-09-22T23:15:01.844ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Active Active Active ds_transition to Faulted
32692023-09-22T23:15:01.844ZINFOcrucible: [0] Transition from Active to Faulted
32702023-09-22T23:15:01.844ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) Faulted Active Active ds_transition to LiveRepairReady
32712023-09-22T23:15:01.844ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
32722023-09-22T23:15:01.844ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) LiveRepairReady Active Active ds_transition to LiveRepair
32732023-09-22T23:15:01.844ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
32742023-09-22T23:15:01.844ZINFOcrucible: Waiting for Close + ReOpen jobs
32752023-09-22T23:15:01.844ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
32762023-09-22T23:15:01.844ZINFOcrucible: RE:0 close id:1000 queued, notify DS
32772023-09-22T23:15:01.844ZINFOcrucible: RE:0 Wait for result from close command 1000:1
32782023-09-22T23:15:01.857ZWARNcrucible: [0] flow control start upstairs = 1
32792023-09-22T23:15:02.588ZINFOcrucible: Waiting for 3 jobs (currently 2)
32802023-09-22T23:15:02.588ZINFOcrucible: No repair needed for extent 0 = downstairs
32812023-09-22T23:15:02.588ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
32822023-09-22T23:15:02.716ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
32832023-09-22T23:15:02.716ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
32842023-09-22T23:15:02.716ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
32852023-09-22T23:15:02.716ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
32862023-09-22T23:15:02.716ZINFOcrucible: [1] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) LiveRepair Active Active ds_transition to Faulted
32872023-09-22T23:15:02.716ZINFOcrucible: [1] Transition from Active to Faulted
32882023-09-22T23:15:02.716ZINFOcrucible: Now ACK the close job
32892023-09-22T23:15:02.716ZINFOcrucible: Waiting for 3 jobs (currently 2)
32902023-09-22T23:15:02.716ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
32912023-09-22T23:15:02.716ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
32922023-09-22T23:15:02.716ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
32932023-09-22T23:15:02.716ZINFOcrucible: [0] ab17680f-7c63-4c2f-bd24-9eb69ab45861 (76bbd7fe-ffcc-4ebf-8060-e1708e4d5ecb) LiveRepair Faulted Active ds_transition to Faulted
32942023-09-22T23:15:02.717ZINFOcrucible: [0] Transition from LiveRepair to Faulted
32952023-09-22T23:15:02.717ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
32962023-09-22T23:15:02.717ZINFOcrucible: Now ACK the close job
32972023-09-22T23:15:02.717ZINFOcrucible: Waiting for 3 jobs (currently 2)
32982023-09-22T23:15:02.717ZINFOcrucible: Repair for extent 0 s:2 d:[ClientId(1)] = downstairs
32992023-09-22T23:15:02.717ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
33002023-09-22T23:15:02.717ZINFOcrucible: Waiting for 3 jobs (currently 2)
33012023-09-22T23:15:02.717ZINFOcrucible: No repair needed for extent 0 = downstairs
33022023-09-22T23:15:02.717ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
33032023-09-22T23:15:02.845ZINFOcrucible: Now ACK the close job
33042023-09-22T23:15:02.845ZINFOcrucible: Waiting for 3 jobs (currently 2)
33052023-09-22T23:15:02.845ZINFOcrucible: No repair needed for extent 0 = downstairs
33062023-09-22T23:15:02.845ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
33072023-09-22T23:15:03.590ZINFOcrucible: Waiting for 4 jobs (currently 3)
33082023-09-22T23:15:03.590ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
33092023-09-22T23:15:03.715ZINFOcrucible: Finally, move the ReOpen job forward
33102023-09-22T23:15:03.715ZINFOcrucible: Now ACK the reopen job
33112023-09-22T23:15:03.715ZWARNcrucible: RE:0 Bailing with error
33122023-09-22T23:15:03.716ZINFOcrucible: Crucible stats registered with UUID: 67c26488-dbae-450b-b463-1bdda5022bc7
33132023-09-22T23:15:03.716ZINFOcrucible: Crucible 67c26488-dbae-450b-b463-1bdda5022bc7 has session id: 4e82f1f9-0353-4e67-9d25-ed3e57002eeb
33142023-09-22T23:15:03.716ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) New New New ds_transition to WaitActive
33152023-09-22T23:15:03.716ZINFOcrucible: [0] Transition from New to WaitActive
33162023-09-22T23:15:03.716ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) WaitActive New New ds_transition to WaitQuorum
33172023-09-22T23:15:03.716ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
33182023-09-22T23:15:03.716ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) WaitQuorum New New ds_transition to Active
33192023-09-22T23:15:03.716ZINFOcrucible: [0] Transition from WaitQuorum to Active
33202023-09-22T23:15:03.716ZINFOcrucible: [1] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active New New ds_transition to WaitActive
33212023-09-22T23:15:03.716ZINFOcrucible: [1] Transition from New to WaitActive
33222023-09-22T23:15:03.716ZINFOcrucible: [1] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active WaitActive New ds_transition to WaitQuorum
33232023-09-22T23:15:03.716ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
33242023-09-22T23:15:03.716ZINFOcrucible: [1] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active WaitQuorum New ds_transition to Active
33252023-09-22T23:15:03.716ZINFOcrucible: [1] Transition from WaitQuorum to Active
33262023-09-22T23:15:03.716ZINFOcrucible: [2] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active Active New ds_transition to WaitActive
33272023-09-22T23:15:03.716ZINFOcrucible: [2] Transition from New to WaitActive
33282023-09-22T23:15:03.716ZINFOcrucible: [2] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active Active WaitActive ds_transition to WaitQuorum
33292023-09-22T23:15:03.716ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
33302023-09-22T23:15:03.716ZINFOcrucible: [2] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active Active WaitQuorum ds_transition to Active
33312023-09-22T23:15:03.716ZINFOcrucible: [2] Transition from WaitQuorum to Active
33322023-09-22T23:15:03.716ZINFOcrucible: 67c26488-dbae-450b-b463-1bdda5022bc7 is now active with session: 70c18185-953d-4518-93af-6f5485ebb3e0
33332023-09-22T23:15:03.716ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Active Active Active ds_transition to Faulted
33342023-09-22T23:15:03.716ZINFOcrucible: [0] Transition from Active to Faulted
33352023-09-22T23:15:03.716ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) Faulted Active Active ds_transition to LiveRepairReady
33362023-09-22T23:15:03.716ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
33372023-09-22T23:15:03.716ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) LiveRepairReady Active Active ds_transition to LiveRepair
33382023-09-22T23:15:03.716ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
33392023-09-22T23:15:03.716ZINFOcrucible: Waiting for Close + ReOpen jobs
33402023-09-22T23:15:03.716ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
33412023-09-22T23:15:03.716ZINFOcrucible: RE:0 close id:1000 queued, notify DS
33422023-09-22T23:15:03.716ZINFOcrucible: RE:0 Wait for result from close command 1000:1
33432023-09-22T23:15:03.717ZINFOcrucible: Waiting for 4 jobs (currently 3)
33442023-09-22T23:15:03.717ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
33452023-09-22T23:15:03.717ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
33462023-09-22T23:15:03.718ZINFOcrucible: Now ACK the repair job
33472023-09-22T23:15:03.718ZINFOcrucible: Waiting for 4 jobs (currently 3)
33482023-09-22T23:15:03.718ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
33492023-09-22T23:15:03.719ZINFOcrucible: Waiting for 4 jobs (currently 3)
33502023-09-22T23:15:03.719ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
33512023-09-22T23:15:03.846ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
33522023-09-22T23:15:03.846ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
33532023-09-22T23:15:03.846ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
33542023-09-22T23:15:03.846ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
33552023-09-22T23:15:03.846ZINFOcrucible: [0] 208c6b08-32db-4498-a111-6f48aa07c8ef (a9d99eb4-fe49-4eea-a3e5-bed72149e733) LiveRepair Active Active ds_transition to Faulted
33562023-09-22T23:15:03.846ZINFOcrucible: [0] Transition from LiveRepair to Faulted
33572023-09-22T23:15:03.846ZINFOcrucible: Waiting for 4 jobs (currently 3)
33582023-09-22T23:15:03.846ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
33592023-09-22T23:15:03.847ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
33602023-09-22T23:15:04.717ZINFOcrucible: Waiting for 3 jobs (currently 2)
33612023-09-22T23:15:04.717ZINFOcrucible: No repair needed for extent 0 = downstairs
33622023-09-22T23:15:04.718ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
33632023-09-22T23:15:04.718ZINFOcrucible: Now move the NoOp job forward
33642023-09-22T23:15:04.718ZINFOcrucible: Now ACK the NoOp job
33652023-09-22T23:15:04.718ZINFOcrucible: Finally, move the ReOpen job forward
33662023-09-22T23:15:04.718ZINFOcrucible: Now ACK the Reopen job
33672023-09-22T23:15:04.718ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
33682023-09-22T23:15:04.718ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
33692023-09-22T23:15:04.718ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
33702023-09-22T23:15:04.718ZWARNcrucible: RE:0 Bailing with error
3371 ----------------------------------------------------------------
3372 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
3373 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3374 1 Acked 1000 FClose 0 Done Err Done false
3375 2 Acked 1001 NoOp 0 Skip Skip Done false
3376 3 Acked 1002 NoOp 0 Skip Skip Done false
3377 4 Acked 1003 Reopen 0 Skip Skip Done false
3378 STATES DS:0 DS:1 DS:2 TOTAL
3379 New 0 0 0 0
3380 Sent 0 0 0 0
3381 Done 1 0 4 5
3382 Skipped 3 3 0 6
3383 Error 0 1 0 1
3384 Last Flush: 0 0 0
3385 Downstairs last five completed:
3386 Upstairs last five completed: 4 3 2 1
33872023-09-22T23:15:04.719ZINFOcrucible: Now move the NoOp job forward
33882023-09-22T23:15:04.719ZINFOcrucible: Now ACK the NoOp job
33892023-09-22T23:15:04.719ZINFOcrucible: Finally, move the ReOpen job forward
33902023-09-22T23:15:04.719ZINFOcrucible: Now ACK the repair job
33912023-09-22T23:15:04.719ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
33922023-09-22T23:15:04.719ZINFOcrucible: Crucible stats registered with UUID: 740b9b19-2d4a-44eb-b854-682cc860698c
33932023-09-22T23:15:04.719ZINFOcrucible: Crucible 740b9b19-2d4a-44eb-b854-682cc860698c has session id: 285f064a-eb2a-49dd-bff1-97e3d20cec44
33942023-09-22T23:15:04.719ZINFOcrucible: [0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) New New New ds_transition to WaitActive
33952023-09-22T23:15:04.719ZINFOcrucible: [0] Transition from New to WaitActive
3396 {"msg":"[0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{,"time":""msg"2023-09-22T23:15:04.719635315Z":","hostname":"Crucible stats registered with UUID: 6b6841a1-369e-4ce0-bd40-27f441691315"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"v":}
3397 0,"name":"crucible{","level"":msg":"30[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30{"msg":"Now move the NoOp job forward",","time"v",:":"2023-09-22T23:15:04.719681536Z"time",:"0",hostname""name:":"2023-09-22T23:15:04.719674741Z"crucible"",","ip-10-150-1-55.us-west-2.compute.internalhostname":"level"":ip-10-150-1-55.us-west-2.compute.internal"30,","pidpid""::47594759}
3398 }
3399 {",msg{"":"time":"2023-09-22T23:15:04.719722939Z","hostname":"[0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) WaitQuorum New New ds_transition to Active","ip-10-150-1-55.us-west-2.compute.internalv"":,""pid"0msg,""name":::""crucible"4759,"level":}30
3400 ,"time":"2023-09-22T23:15:04.719767941Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3401 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30{"msg":"Crucible 6b6841a1-369e-4ce0-bd40-27f441691315 has session id: 6afb4f20-ee36-42e0-b477-7bcca39adbb5"[1] DS Reports error Err(GenericError(\"bad\",,""v"time:":0","name"2023-09-22T23:15:04.719813863Z":","crucible"hostname":","level":ip-10-150-1-55.us-west-2.compute.internal"30,")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"pid":4759}
3402 ,"v":0,"name"{:"crucible","level":"50msg":","time"[1] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active New New ds_transition to WaitActive":","v":2023-09-22T23:15:04.719842039Z"0,","name":"hostname":"crucible","level"ip-10-150-1-55.us-west-2.compute.internal":,"30pid",":time"4759:"}2023-09-22T23:15:04.719857884Z
3403 ","hostname":"{ip-10-150-1-55.us-west-2.compute.internal",,""time"pid""::"msg"47592023-09-22T23:15:04.719875817Z":",,""":hostname"":[0] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) New New New ds_transition to WaitActive"downstairs""},ip-10-150-1-55.us-west-2.compute.internal""
3404 ,"v"pid"::47590{,"}name
3405 ""msg:"":"{crucible",[1] Reports error GenericError("\"levelbad"\""msg":":30[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level",":time",:50""time":2023-09-22T23:15:04.71994208Z"","hostname":2023-09-22T23:15:04.719948378Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid":}4759
3406 },
3407 "{time":"2023-09-22T23:15:04.719965842Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal"","[0] Transition from New to WaitActivepid":"4759msg,,"":"":""vdownstairs"":}
3408 0[1] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active WaitActive New ds_transition to WaitQuorum,""{name",:"""v"msg":":crucible"[1] client skip 4 in process jobs because fault",0,","v":level0"","name"name":":":crucible"crucible,30""level":,30"level":30,"time":"2023-09-22T23:15:04.720044379Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,","":time"":"downstairs","}
3409 2023-09-22T23:15:04.72004235Z"time",:{"""msg"hostname"2023-09-22T23:15:04.720046555Z":":",[1] changed 0 jobs to fault skipped"","hostname"v"::"0ip-10-150-1-55.us-west-2.compute.internal",",ip-10-150-1-55.us-west-2.compute.internal"name":""pidcrucible",","pid"level"::"304759:4759}
3410 },"
3411 time":"{2023-09-22T23:15:04.720096849Z","hostname":"{"ip-10-150-1-55.us-west-2.compute.internal"msg",":pid":"4759","":"msg":"downstairs"[1] Transition from WaitActive to WaitQuorum"}
3412 ,"v"[0] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) WaitActive New New ds_transition to WaitQuorum{:"0",msg","":"name"v":":[1] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) LiveRepair Active Active ds_transition to Faultedcrucible0"",,,""name"v":0:",""name":"level":crucible"crucible",30,""level":30level":30,"time":"2023-09-22T23:15:04.720160661Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":,""pid":4759}
3413 2023-09-22T23:15:04.720158251Z",,""{hostname"time"":":msg":""ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from Active to Faulted",2023-09-22T23:15:04.720162645Z""pid":,4759,"}"
3414 v":hostname0{,""name"":"msg":":"crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active WaitQuorum New ds_transition to Active"},
3415 ","v"time":":0,"2023-09-22T23:15:04.720214509Z"name",:""hostname":"crucible"{ip-10-150-1-55.us-west-2.compute.internal",,""pid"":msg"level":475930:"}
3416 [0] Transition from WaitActive to WaitQuorum"{,"v"":msg":"0,"nameRE:0 Wait for result from reopen command 1003:4"",",:"v":crucible"0,,"""nametime""::level":""30crucible"2023-09-22T23:15:04.720244054Z",",level":"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3417 ,"time":"{2023-09-22T23:15:04.720273848Z","hostname":""msg":","ip-10-150-1-55.us-west-2.compute.internal","[1] Transition from WaitQuorum to Activepid":time"4759,""}:"v"
3418 2023-09-22T23:15:04.720270146Z"{:0,""msg"hostname":",:"Extent 0 close id:1003 Failed: Error: bad""name",":ip-10-150-1-55.us-west-2.compute.internalv":0"","crucible",",name":""pidlevel":crucible30"":,"4759level":50}
3419 ,"time":","time":2023-09-22T23:15:04.720329311Z"","{hostname":"2023-09-22T23:15:04.720326105Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""msgpid":""4759:}ip-10-150-1-55.us-west-2.compute.internal"",
3420 "pid"{:[0] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) WaitQuorum New New ds_transition to Active4759""}msg"
3421 ,:""{v"[0] client skip 4 in process jobs because fault"",":v":00msg",,":""name":"name"crucible":","crucible"[2] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active Active New ds_transition to WaitActive,"levellevel"",:"30:"v30":0,"name":"crucible",","level"time":":302023-09-22T23:15:04.720394094Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":"downstairs"}
3422 {"msg":"[0] changed 0 jobs to fault skipped",",,""v"time"::time0"",":name"":"2023-09-22T23:15:04.72039684Z2023-09-22T23:15:04.720403506Z"crucible"",,""level":30,"hostname":"hostname":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",",",pid":"4759pid"}:
3423 4759time":"{}2023-09-22T23:15:04.720434421Z"
3424 ","msg"hostname:"":"{ip-10-150-1-55.us-west-2.compute.internal"[2] Transition from New to WaitActive",",""pidv":":04759,,msg""name":":crucible"""",:""[0] Transition from WaitQuorum to Activedownstairs""}
3425 level","{v:"30"msg":":0,"name":"crucible"[0] c96f066c-48e7-437e-b4d0-bca7db257ca7 (777a2755-8455-4e73-a9be-625addff6b55) LiveRepair Faulted Active ds_transition to Faulted",","level"v":0:,",name":"30crucible"","time"level":30:"2023-09-22T23:15:04.720491881Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",":time":"47592023-09-22T23:15:04.720509593Z"},"
3426 ,"hostname":{time"":""ip-10-150-1-55.us-west-2.compute.internal"msg2023-09-22T23:15:04.72050618Z"",",:pid":"4759"}
3427 hostname":"{"msg[2] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active Active WaitActive ds_transition to WaitQuorumip-10-150-1-55.us-west-2.compute.internal""",:",""v"pid:[0] Transition from LiveRepair to Faulted""0:,,""v":name":"4759crucible"0,,""namelevel":}30":"
3428 crucible","level":30{,"time":"","2023-09-22T23:15:04.72056924Ztime"":"msg",2023-09-22T23:15:04.720575924Z"",hostname"::""hostname":""ip-10-150-1-55.us-west-2.compute.internal","pid":4759ip-10-150-1-55.us-west-2.compute.internal"}[1] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active New New ds_transition to WaitActive"
3429 ,,""{pid"v"::04759",msg""}:"
3430 RE:0 Bailing with error"name",{":"v":"0crucible","msg"name":",:crucible"","level"level":":3040[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:04.720635602Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3431 ,"time":","2023-09-22T23:15:04.720642033Ztime":"","2023-09-22T23:15:04.720635808Zhostname"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid"}:
3432 4759}{
3433 "msg":"{[2] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active Active WaitQuorum ds_transition to Active"","msgv":":"0,"name":"[1] Transition from New to WaitActive"crucible",,""levelv""::300,"name":"crucible","level":30,"time":"2023-09-22T23:15:04.720706765Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3434 ,"time":"{2023-09-22T23:15:04.720711694Z""msg":,""hostname":"[2] Transition from WaitQuorum to Active","v":ip-10-150-1-55.us-west-2.compute.internal"0,",pid"":name":"4759crucible","}level"
3435 :30{"msg":","time":"[1] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active WaitActive New ds_transition to WaitQuorum","2023-09-22T23:15:04.720756429Z"v",:"0hostname":","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal,"",level"":pid":304759}
3436 {"msg":"740b9b19-2d4a-44eb-b854-682cc860698c is now active with session: d76910b1-bd56-4bb7-8622-025d64ee3acf","v":0,"name":"crucible",",level"":time30":"2023-09-22T23:15:04.720783904Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3437 ,"time":"2023-09-22T23:15:04.72080403Z{","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid"[1] Transition from WaitActive to WaitQuorum":,4759"v"}:
3438 0,"name":{"crucible"","msg":level"":30[0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:04.720844643Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,}"
3439 time":"2023-09-22T23:15:04.720855065Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3440 [1] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active WaitQuorum New ds_transition to Active","v"{:0,""namemsg"":":"crucible","level"[0] Transition from Active to Faulted":,"30v":0,"name":"crucible","level":30,"time":","time":2023-09-22T23:15:04.72090176Z"","hostname":"2023-09-22T23:15:04.720897473Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pid":4759}ip-10-150-1-55.us-west-2.compute.internal"
3441 ,"pid":4759{}"
3442 msg":"{"[0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) Faulted Active Active ds_transition to LiveRepairReady"msg,""v"::{"0,"name":"[1] Transition from WaitQuorum to Active"crucible",",msg":"""level"v":Crucible stats registered with UUID: 58683b55-a732-4f77-bcc0-42761edb8654"30:,"0v",:"0name":","crucible"name":",crucible"",,"level""level":time30:":30"2023-09-22T23:15:04.720963282Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3443 ,"{time":"2023-09-22T23:15:04.720978813Z"","msghostname":",""ip-10-150-1-55.us-west-2.compute.internal":,""time"pid":[0] Transition from Faulted to LiveRepairReady"4759:,"}v"
3444 ":0,"2023-09-22T23:15:04.72098082Zname":"{","crucible"hostname,"""msg":":"level":30Crucible 58683b55-a732-4f77-bcc0-42761edb8654 has session id: d4b9a97d-da05-496a-91a9-4a4f72a18bebip-10-150-1-55.us-west-2.compute.internal"",,""vpid":0",":name":"4759crucible","level"}:
3445 ,30"time":"{2023-09-22T23:15:04.721030266Z",""hostname":"msg":,"ip-10-150-1-55.us-west-2.compute.internal"time":",""pid":2023-09-22T23:15:04.721047527Z"4759,"hostname":"}
3446 ip-10-150-1-55.us-west-2.compute.internal"[2] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active Active New ds_transition to WaitActive","pid":{4759,}
3447 ""{v"msg""msg"::":"0,"name":"crucible","[0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) New New New ds_transition to WaitActive"level"[0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) LiveRepairReady Active Active ds_transition to LiveRepair":,30,""v":v"0:,"0name":,""crucible"name",":level":30","crucible"time":","2023-09-22T23:15:04.721090535Z","hostname":,""time":"2023-09-22T23:15:04.72110221Z","ip-10-150-1-55.us-west-2.compute.internal"hostname":","pid"ip-10-150-1-55.us-west-2.compute.internal",level""::pid"475930:}4759
3448 {}
3449 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,",time":""time2023-09-22T23:15:04.721242877Z"":,""hostname":"2023-09-22T23:15:04.721122816Z","ip-10-150-1-55.us-west-2.compute.internal"time",:""pid",2023-09-22T23:15:04.721252478Z:"4759",}"
3450 hostname":hostname"":{"ip-10-150-1-55.us-west-2.compute.internal"",msg"":pid""ip-10-150-1-55.us-west-2.compute.internal":,4759"pid"}:
3451 [0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) WaitActive New New ds_transition to WaitQuorum4759"{,"v"}:"0msg,"":"name"
3452 [2] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active Active WaitActive ds_transition to WaitQuorum:"",crucible""v",:"0level,"":name30":"crucible{","level":30"msg":"[0] Transition from LiveRepairReady to LiveRepair","v":,"0time":",","2023-09-22T23:15:04.721305805Ztime"":,""name":"hostname2023-09-22T23:15:04.721309442Z"crucible":",",""level":hostnameip-10-150-1-55.us-west-2.compute.internal"":,""30pid":4759ip-10-150-1-55.us-west-2.compute.internal",}"
3453 pid":4759}{
3454 "msg":"{"msg"[0] Transition from WaitActive to WaitQuorum:"","v":0,"[2] Transition from WaitActive to WaitQuorum"name",:""v":crucible0",","name"level:"":30crucible","level":30,"time":"2023-09-22T23:15:04.721333266Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,,""time"time:"":""pid2023-09-22T23:15:04.721349377Z"2023-09-22T23:15:04.721351145Z"",,"":hostnamehostname""::""4759}ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:
3455 4759}
3456 }
3457 {{{"msg":""msg":""msg":"Waiting for Close + ReOpen jobs","v":[2] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active Active WaitQuorum ds_transition to Active0"[0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) WaitQuorum New New ds_transition to Active,,"",v""v"::00,"",name""name":name:"":crucible"","crucible"level,"":level"30:"30crucible","level":30,,""timetime""::""2023-09-22T23:15:04.72141031Z2023-09-22T23:15:04.721411421Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:":47594759}}
3458 
3459 {,"{"time":msg""msg""::""2023-09-22T23:15:04.721414435Z","[2] Transition from WaitQuorum to Active[0] Transition from WaitQuorum to Active"",hostname",""vv""::00,",:"name"":"name":crucible""ip-10-150-1-55.us-west-2.compute.internal"crucible,"","levellevel""::3030,"pid":4759}
3460 ,,""timetime{""::""2023-09-22T23:15:04.721455737Z2023-09-22T23:15:04.721455885Z""",,""msg":"hostnamehostname""::""RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,"",pidpid""::47594759"}}
3461 
3462 {v":{"0"msg"msg:"":","name":"crucible","level":306b6841a1-369e-4ce0-bd40-27f441691315 is now active with session: 604185d3-c867-44d9-80b9-4f85beef9147[1] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active New New ds_transition to WaitActive"",,""vv""::00,,""namename""::""cruciblecrucible"",,""levellevel""::3030,,,""time"time:"":""2023-09-22T23:15:04.721505031Z"time",2023-09-22T23:15:04.72150495Z"":,""hostname":hostname"":"2023-09-22T23:15:04.72149803Z"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal,"",pid""pid:"4759:"}4759
3463 }
3464 {hostname":"{"msg":""msg"ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from New to WaitActive:,"",""pidv""::0[2] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active Active Active ds_transition to Faulted,""4759name,"":"v"}:crucible0"
3465 ,,""levelname""::30"crucible","level"{:30"msg":"RE:0 close id:1000 queued, notify DS",",v"":time,"":time"":"02023-09-22T23:15:04.721559142Z"2023-09-22T23:15:04.721562392Z",,,"""hostnamename"hostname""::"":"crucible","ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,"level"pid""pid:"4759::}4759
3466 }30
3467 {"{msg":""msg":"[2] Transition from Active to Faulted","v":0,"name":"crucible","level[1] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active WaitActive New ds_transition to WaitQuorum"":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:04.721593532Z","hostname,"":time"":,""time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:15:04.721604772Z2023-09-22T23:15:04.721609412Z"",,",""pidhostnamehostname""::""":4759ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:"4759:}}4759
3468 }
3469 {
3470 "msg":"{{[1] Transition from WaitActive to WaitQuorum""msg",:""v":"0,"msgname"":"[2] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active Active Faulted ds_transition to LiveRepairReadycrucible"":,,""v":level0",:"30name"":"crucible","RE:0 Wait for result from close command 1000:1level"":30,"v":0,"name":"crucible",,""timelevel":",:""time":302023-09-22T23:15:04.721667239Z"","hostname2023-09-22T23:15:04.721672849Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4759,"pid}"
3471 :4759}
3472 {{"msg":""msg":","[2] Transition from Faulted to LiveRepairReady"time","[1] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active WaitQuorum New ds_transition to Activev"",:"0v,""::"name0",:""namecrucible"",:""level":crucible30"2023-09-22T23:15:04.721684457Z",",level"":30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}
3473 2023-09-22T23:15:04.721712886Z,"",time"":"hostname":"2023-09-22T23:15:04.721717809Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pid":4759}
3474 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
3475 "msg":"{"msg":"[1] Transition from WaitQuorum to Active","v":[2] 6b6841a1-369e-4ce0-bd40-27f441691315 (604185d3-c867-44d9-80b9-4f85beef9147) Active Active LiveRepairReady ds_transition to LiveRepair0",","name"v:"":0crucible,"","name":level"":crucible30","level":30,",time"":time"":"2023-09-22T23:15:04.721754294Z"2023-09-22T23:15:04.721755839Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47594759}}
3476 
3477 {"{msg":""msg":"[2] Transition from LiveRepairReady to LiveRepair","v":0[2] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active Active New ds_transition to WaitActive,"",name""v:"":0crucible,"",name"":"level":crucible"30,"level":30,","time"time:"":"2023-09-22T23:15:04.721788672Z"2023-09-22T23:15:04.721789949Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47594759}}
3478 
3479 {"{msg":""msg":"Waiting for Close + ReOpen jobs","v"[2] Transition from New to WaitActive:"0,,""v"name:"0:","cruciblename"":,""levelcrucible"":,30"level":30,,""timetime""::""2023-09-22T23:15:04.721820711Z2023-09-22T23:15:04.721821803Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47594759}}
3480 
3481 {"{msg":""msg":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":0,"name":"crucible",[2] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active Active WaitActive ds_transition to WaitQuorum""level,"":v30":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:04.721853781Z",","time":hostname"":"2023-09-22T23:15:04.721857688Z",ip-10-150-1-55.us-west-2.compute.internal"","hostname"pid:"":4759}
3482 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
3483 "msg":"{"RE:0 close id:1000 queued, notify DSmsg"":","v":0,"name[2] Transition from WaitActive to WaitQuorum"":","cruciblev"":,0","level"name:":30"crucible","level":30,"time":","time":2023-09-22T23:15:04.721887122Z"","hostname2023-09-22T23:15:04.721889418Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4759","}
3484 pid":4759}{
3485 "msg":"{"msgRE:0 Wait for result from close command 1000:1"":","v":0,"name":"crucible","level":30[2] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:04.721918673Z","hostname":","time":ip-10-150-1-55.us-west-2.compute.internal"","pid":2023-09-22T23:15:04.721924363Z4759","}
3486 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
34872023-09-22T23:15:04.721ZINFOcrucible: [2] Transition from WaitQuorum to Active
34882023-09-22T23:15:04.721ZINFOcrucible: 58683b55-a732-4f77-bcc0-42761edb8654 is now active with session: cac0d111-a31a-4543-a886-1fea695e5af3
34892023-09-22T23:15:04.722ZINFOcrucible: [0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Active Active Active ds_transition to Faulted
34902023-09-22T23:15:04.722ZINFOcrucible: [0] Transition from Active to Faulted
34912023-09-22T23:15:04.722ZINFOcrucible: [0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) Faulted Active Active ds_transition to LiveRepairReady
34922023-09-22T23:15:04.722ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
34932023-09-22T23:15:04.722ZINFOcrucible: [0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) LiveRepairReady Active Active ds_transition to LiveRepair
34942023-09-22T23:15:04.722ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
34952023-09-22T23:15:04.722ZINFOcrucible: Waiting for Close + ReOpen jobs
34962023-09-22T23:15:04.722ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
34972023-09-22T23:15:04.722ZINFOcrucible: RE:0 close id:1000 queued, notify DS
34982023-09-22T23:15:04.722ZINFOcrucible: RE:0 Wait for result from close command 1000:1
34992023-09-22T23:15:04.848ZINFOcrucible: Now move the NoOp job forward
35002023-09-22T23:15:04.848ZINFOcrucible: Now ACK the NoOp job
35012023-09-22T23:15:04.848ZINFOcrucible: Finally, move the ReOpen job forward
35022023-09-22T23:15:04.848ZINFOcrucible: Now ACK the Reopen job
35032023-09-22T23:15:04.848ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
35042023-09-22T23:15:04.848ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
35052023-09-22T23:15:04.848ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
35062023-09-22T23:15:04.848ZWARNcrucible: RE:0 Bailing with error
35072023-09-22T23:15:04.849ZINFOcrucible: Crucible stats registered with UUID: 71b60e6c-2610-440d-b92b-182648d96e96
35082023-09-22T23:15:04.849ZINFOcrucible: Crucible 71b60e6c-2610-440d-b92b-182648d96e96 has session id: 5838c09f-b248-45c2-9508-0e9bee9f039f
35092023-09-22T23:15:04.849ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) New New New ds_transition to WaitActive
35102023-09-22T23:15:04.849ZINFOcrucible: [0] Transition from New to WaitActive
35112023-09-22T23:15:04.849ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) WaitActive New New ds_transition to WaitQuorum
35122023-09-22T23:15:04.849ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
35132023-09-22T23:15:04.849ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) WaitQuorum New New ds_transition to Active
35142023-09-22T23:15:04.849ZINFOcrucible: [0] Transition from WaitQuorum to Active
35152023-09-22T23:15:04.849ZINFOcrucible: [1] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active New New ds_transition to WaitActive
35162023-09-22T23:15:04.849ZINFOcrucible: [1] Transition from New to WaitActive
35172023-09-22T23:15:04.849ZINFOcrucible: [1] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active WaitActive New ds_transition to WaitQuorum
35182023-09-22T23:15:04.849ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
35192023-09-22T23:15:04.849ZINFOcrucible: [1] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active WaitQuorum New ds_transition to Active
35202023-09-22T23:15:04.849ZINFOcrucible: [1] Transition from WaitQuorum to Active
35212023-09-22T23:15:04.849ZINFOcrucible: [2] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active Active New ds_transition to WaitActive
35222023-09-22T23:15:04.849ZINFOcrucible: [2] Transition from New to WaitActive
35232023-09-22T23:15:04.849ZINFOcrucible: [2] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active Active WaitActive ds_transition to WaitQuorum
35242023-09-22T23:15:04.849ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
35252023-09-22T23:15:04.849ZINFOcrucible: [2] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active Active WaitQuorum ds_transition to Active
35262023-09-22T23:15:04.849ZINFOcrucible: [2] Transition from WaitQuorum to Active
35272023-09-22T23:15:04.849ZINFOcrucible: 71b60e6c-2610-440d-b92b-182648d96e96 is now active with session: f64d2a54-56b6-4ceb-9870-e701a34e1d11
35282023-09-22T23:15:04.849ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Active Active Active ds_transition to Faulted
35292023-09-22T23:15:04.849ZINFOcrucible: [0] Transition from Active to Faulted
35302023-09-22T23:15:04.849ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) Faulted Active Active ds_transition to LiveRepairReady
35312023-09-22T23:15:04.849ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
35322023-09-22T23:15:04.849ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) LiveRepairReady Active Active ds_transition to LiveRepair
35332023-09-22T23:15:04.849ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
35342023-09-22T23:15:04.849ZINFOcrucible: Waiting for Close + ReOpen jobs
35352023-09-22T23:15:04.849ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
35362023-09-22T23:15:04.849ZINFOcrucible: RE:0 close id:1000 queued, notify DS
35372023-09-22T23:15:04.849ZINFOcrucible: RE:0 Wait for result from close command 1000:1
35382023-09-22T23:15:05.719ZINFOcrucible: Waiting for 4 jobs (currently 3)
35392023-09-22T23:15:05.719ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
35402023-09-22T23:15:05.721ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
35412023-09-22T23:15:05.721ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
35422023-09-22T23:15:05.721ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
3543 {"msg":"[2] changed 1 jobs to fault skipped","v":0,"name":"crucible","level":30{,"time":"2023-09-22T23:15:05.721247918Z"","msg"hostname"::""Now ACK the close job"ip-10-150-1-55.us-west-2.compute.internal","pid",:"4759v":,"0":","downstairsname"":"}
3544 crucible","level":30{"msg":"[2] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) LiveRepair Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:05.721289411Z",","time":"hostname":"2023-09-22T23:15:05.721303975Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"",":pid":47594759}
3545 }
3546 {"msg":"[2] Transition from Active to Faulted","v":0,{"name":"crucible","level":30"msg":"Waiting for 3 jobs (currently 2)","v":0,"name",":time":""crucible2023-09-22T23:15:05.721343652Z"",",hostname":""level":ip-10-150-1-55.us-west-2.compute.internal",30"pid":4759}
3547 {"msg":"Now ACK the close job","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:05.721369202Z",","hostname":"time":"ip-10-150-1-55.us-west-2.compute.internal","pid":47592023-09-22T23:15:05.721360878Z}"
3548 ,"hostname"{:""msg":"Waiting for 3 jobs (currently 2)"ip-10-150-1-55.us-west-2.compute.internal",","v":pid"0:,"4759name":"crucible","}level
3549 ":30{","msg"time":":"2023-09-22T23:15:05.721402723Z","hostnameRepair for extent 0 s:0 d:[ClientId(2)]"":","v":0ip-10-150-1-55.us-west-2.compute.internal,",""name":"pidcrucible"":,"4759level"}
3550 :30{"msg":"Extent 0 close id:1000 Failed: Error: bad","v":0,"name":"crucible","level":50,"time":","2023-09-22T23:15:05.721426971Z"time":","hostname":2023-09-22T23:15:05.721432433Z"","hostname":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759":}
3551 "downstairs"{}
3552 "msg":"{[0] client skip 2 in process jobs because fault"",msg":""v":0,"name"RE:0 Wait for result from repair command 1001:2":,""v":crucible"0,",level":"30name":"crucible","level":30,"time":"2023-09-22T23:15:05.721469903Z",",hostname"":time":""2023-09-22T23:15:05.721471629Z","ip-10-150-1-55.us-west-2.compute.internal"hostname":","pid":4759ip-10-150-1-55.us-west-2.compute.internal,"",":""downstairs"pid"}:
3553 4759}
35542023-09-22T23:15:05.721ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
35552023-09-22T23:15:05.721ZINFOcrucible: [0] 740b9b19-2d4a-44eb-b854-682cc860698c (d76910b1-bd56-4bb7-8622-025d64ee3acf) LiveRepair Active Faulted ds_transition to Faulted
35562023-09-22T23:15:05.721ZINFOcrucible: [0] Transition from LiveRepair to Faulted
35572023-09-22T23:15:05.721ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
35582023-09-22T23:15:05.722ZINFOcrucible: Waiting for 3 jobs (currently 2)
35592023-09-22T23:15:05.722ZINFOcrucible: No repair needed for extent 0 = downstairs
35602023-09-22T23:15:05.722ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
35612023-09-22T23:15:05.849ZINFOcrucible: Now ACK the close job
35622023-09-22T23:15:05.849ZINFOcrucible: Waiting for 3 jobs (currently 2)
35632023-09-22T23:15:05.849ZINFOcrucible: No repair needed for extent 0 = downstairs
35642023-09-22T23:15:05.849ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
3565 ----------------------------------------------------------------
3566 Crucible gen:0 GIO:true work queues: Upstairs:2 downstairs:4
3567 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3568 1 Acked 1000 FClose 0 Done Done Done false
3569 2 Acked 1001 NoOp 0 Done Done Done false
3570 3 NotAcked 1002 NoOp 0 New New New false
3571 4 NotAcked 1003 Reopen 0 New New New false
3572 STATES DS:0 DS:1 DS:2 TOTAL
3573 New 2 2 2 6
3574 Sent 0 0 0 0
3575 Done 2 2 2 6
3576 Skipped 0 0 0 0
3577 Error 0 0 0 0
3578 Last Flush: 0 0 0
3579 Downstairs last five completed:
3580 Upstairs last five completed: 2 1
35812023-09-22T23:15:06.591ZINFOcrucible: Now move the NoOp job forward
35822023-09-22T23:15:06.591ZINFOcrucible: Finally, move the ReOpen job forward
35832023-09-22T23:15:06.591ZINFOcrucible: Now ACK the reopen job
35842023-09-22T23:15:06.591ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
35852023-09-22T23:15:06.591ZINFOcrucible: jobs are: 4
35862023-09-22T23:15:06.592ZINFOcrucible: Crucible stats registered with UUID: 87064aa7-d0ac-4f60-b676-7a1f3cd25380
35872023-09-22T23:15:06.592ZINFOcrucible: Crucible 87064aa7-d0ac-4f60-b676-7a1f3cd25380 has session id: 08aa62d6-481b-4db0-8b56-7e1bd0f45909
35882023-09-22T23:15:06.592ZINFOcrucible: [0] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) New New New ds_transition to WaitActive
35892023-09-22T23:15:06.592ZINFOcrucible: [0] Transition from New to WaitActive
35902023-09-22T23:15:06.592ZINFOcrucible: [0] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) WaitActive New New ds_transition to WaitQuorum
35912023-09-22T23:15:06.592ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
35922023-09-22T23:15:06.592ZINFOcrucible: [0] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) WaitQuorum New New ds_transition to Active
35932023-09-22T23:15:06.592ZINFOcrucible: [0] Transition from WaitQuorum to Active
35942023-09-22T23:15:06.592ZINFOcrucible: [1] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active New New ds_transition to WaitActive
35952023-09-22T23:15:06.592ZINFOcrucible: [1] Transition from New to WaitActive
35962023-09-22T23:15:06.592ZINFOcrucible: [1] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active WaitActive New ds_transition to WaitQuorum
35972023-09-22T23:15:06.592ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
35982023-09-22T23:15:06.592ZINFOcrucible: [1] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active WaitQuorum New ds_transition to Active
35992023-09-22T23:15:06.592ZINFOcrucible: [1] Transition from WaitQuorum to Active
36002023-09-22T23:15:06.592ZINFOcrucible: [2] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active Active New ds_transition to WaitActive
36012023-09-22T23:15:06.592ZINFOcrucible: [2] Transition from New to WaitActive
36022023-09-22T23:15:06.592ZINFOcrucible: [2] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active Active WaitActive ds_transition to WaitQuorum
36032023-09-22T23:15:06.592ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
36042023-09-22T23:15:06.592ZINFOcrucible: [2] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active Active WaitQuorum ds_transition to Active
36052023-09-22T23:15:06.592ZINFOcrucible: [2] Transition from WaitQuorum to Active
36062023-09-22T23:15:06.592ZINFOcrucible: 87064aa7-d0ac-4f60-b676-7a1f3cd25380 is now active with session: 38df2a71-8b65-46d9-a8e8-79394e623d8a
36072023-09-22T23:15:06.592ZINFOcrucible: [1] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active Active Active ds_transition to Faulted
36082023-09-22T23:15:06.592ZINFOcrucible: [1] Transition from Active to Faulted
36092023-09-22T23:15:06.592ZINFOcrucible: [1] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active Faulted Active ds_transition to LiveRepairReady
36102023-09-22T23:15:06.592ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
36112023-09-22T23:15:06.592ZINFOcrucible: [1] 87064aa7-d0ac-4f60-b676-7a1f3cd25380 (38df2a71-8b65-46d9-a8e8-79394e623d8a) Active LiveRepairReady Active ds_transition to LiveRepair
36122023-09-22T23:15:06.592ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
36132023-09-22T23:15:06.592ZINFOcrucible: Waiting for Close + ReOpen jobs
36142023-09-22T23:15:06.592ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
36152023-09-22T23:15:06.592ZINFOcrucible: RE:0 close id:1000 queued, notify DS
36162023-09-22T23:15:06.592ZINFOcrucible: RE:0 Wait for result from close command 1000:1
36172023-09-22T23:15:06.719ZINFOcrucible: Now move the NoOp job forward
36182023-09-22T23:15:06.719ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
36192023-09-22T23:15:06.719ZERROcrucible: [1] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
36202023-09-22T23:15:06.719ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
36212023-09-22T23:15:06.719ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
36222023-09-22T23:15:06.719ZINFOcrucible: [1] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) LiveRepair Active Active ds_transition to Faulted
36232023-09-22T23:15:06.719ZINFOcrucible: [1] Transition from Active to Faulted
36242023-09-22T23:15:06.719ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
36252023-09-22T23:15:06.719ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
36262023-09-22T23:15:06.719ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
36272023-09-22T23:15:06.720ZINFOcrucible: [0] 67c26488-dbae-450b-b463-1bdda5022bc7 (70c18185-953d-4518-93af-6f5485ebb3e0) LiveRepair Faulted Active ds_transition to Faulted
36282023-09-22T23:15:06.720ZINFOcrucible: [0] Transition from LiveRepair to Faulted
36292023-09-22T23:15:06.720ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
3630 {"msg":"Waiting for 4 jobs (currently 3)","v":0,"name":"crucible","level":30{"msg":"Now ACK the repair job","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:06.722824235Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3631 ,"time":"2023-09-22T23:15:06.722843295Z"{,"hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal","Extent 0 close id:1001 Failed: Error: badpid"":,4759"v":0},"
3632 name":"crucible","level":50{"msg":"Waiting for 4 jobs (currently 3)","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:06.722889971Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3633 ,"time":"2023-09-22T23:15:06.722901782Z"{,"hostname":""msg":ip-10-150-1-55.us-west-2.compute.internal"","pid":4759}
3634 RE:0 Wait for result from NoOp command 1002:3","v{":0,""msg":name"":"crucible"RE:0 Wait for result from NoOp command 1002:3",,""v":level"0:,"30name":"crucible","level":30,"time":"2023-09-22T23:15:06.722941861Z",","hostname":"time":"ip-10-150-1-55.us-west-2.compute.internal","pid":47592023-09-22T23:15:06.722940115Z"}
3635 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
36362023-09-22T23:15:06.723ZINFOcrucible: Waiting for 4 jobs (currently 3)
36372023-09-22T23:15:06.723ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
36382023-09-22T23:15:06.851ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
36392023-09-22T23:15:06.851ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
36402023-09-22T23:15:06.851ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
36412023-09-22T23:15:06.851ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
36422023-09-22T23:15:06.851ZINFOcrucible: [1] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) LiveRepair Active Active ds_transition to Faulted
36432023-09-22T23:15:06.851ZINFOcrucible: [1] Transition from Active to Faulted
36442023-09-22T23:15:06.851ZINFOcrucible: Waiting for 4 jobs (currently 3)
36452023-09-22T23:15:06.851ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
36462023-09-22T23:15:06.851ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
36472023-09-22T23:15:06.851ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
36482023-09-22T23:15:06.851ZINFOcrucible: [0] 71b60e6c-2610-440d-b92b-182648d96e96 (f64d2a54-56b6-4ceb-9870-e701a34e1d11) LiveRepair Faulted Active ds_transition to Faulted
36492023-09-22T23:15:06.851ZINFOcrucible: [0] Transition from LiveRepair to Faulted
36502023-09-22T23:15:06.851ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
36512023-09-22T23:15:07.592ZINFOcrucible: Waiting for 3 jobs (currently 2)
36522023-09-22T23:15:07.593ZINFOcrucible: No repair needed for extent 0 = downstairs
36532023-09-22T23:15:07.593ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
36542023-09-22T23:15:07.723ZINFOcrucible: Now move the NoOp job forward
36552023-09-22T23:15:07.723ZINFOcrucible: Now ACK the NoOp job
36562023-09-22T23:15:07.723ZINFOcrucible: Finally, move the ReOpen job forward
36572023-09-22T23:15:07.723ZINFOcrucible: Now ACK the Reopen job
36582023-09-22T23:15:07.723ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
36592023-09-22T23:15:07.723ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
36602023-09-22T23:15:07.723ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
36612023-09-22T23:15:07.723ZWARNcrucible: RE:0 Bailing with error
3662 ----------------------------------------------------------------
3663 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
3664 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3665 1 Acked 1000 FClose 0 Done Done Err false
3666 2 Acked 1001 NoOp 0 Skip Done Skip false
3667 3 Acked 1002 NoOp 0 Skip Done Skip false
3668 4 Acked 1003 Reopen 0 Skip Done Skip false
3669 STATES DS:0 DS:1 DS:2 TOTAL
3670 New 0 0 0 0
3671 Sent 0 0 0 0
3672 Done 1 4 0 5
3673 Skipped 3 0 3 6
3674 Error 0 0 1 1
3675 Last Flush: 0 0 0
3676 Downstairs last five completed:
3677 Upstairs last five completed: 4 3 2 1
36782023-09-22T23:15:07.724ZINFOcrucible: Crucible stats registered with UUID: 98c53138-edfb-4bb6-9deb-ec8ddf307957
36792023-09-22T23:15:07.724ZINFOcrucible: Crucible 98c53138-edfb-4bb6-9deb-ec8ddf307957 has session id: c2840ded-d7e5-484e-b1fc-80b6031c41b9
36802023-09-22T23:15:07.724ZINFOcrucible: [0] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) New New New ds_transition to WaitActive
36812023-09-22T23:15:07.724ZINFOcrucible: [0] Transition from New to WaitActive
36822023-09-22T23:15:07.724ZINFOcrucible: [0] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) WaitActive New New ds_transition to WaitQuorum
36832023-09-22T23:15:07.724ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
36842023-09-22T23:15:07.724ZINFOcrucible: [0] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) WaitQuorum New New ds_transition to Active
36852023-09-22T23:15:07.724ZINFOcrucible: [0] Transition from WaitQuorum to Active
36862023-09-22T23:15:07.724ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active New New ds_transition to WaitActive
36872023-09-22T23:15:07.724ZINFOcrucible: [1] Transition from New to WaitActive
36882023-09-22T23:15:07.724ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active WaitActive New ds_transition to WaitQuorum
36892023-09-22T23:15:07.724ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
36902023-09-22T23:15:07.724ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active WaitQuorum New ds_transition to Active
36912023-09-22T23:15:07.724ZINFOcrucible: [1] Transition from WaitQuorum to Active
3692 {"msg":"[2] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30{"msg":","timeNow move the NoOp job forward"":",2023-09-22T23:15:07.724369905Z""v",":hostname":"0,"name"ip-10-150-1-55.us-west-2.compute.internal",":"pid":crucible"4759,}
3693 "level":{30"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.724397904Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3694 ,"time":"{2023-09-22T23:15:07.724393434Z""msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":[2] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active Active WaitActive ds_transition to WaitQuorum4759","v":0},"
3695 name":"crucible","level":30{"msg":"Now ACK the NoOp job","v":0,"name":"crucible",","level":time":"302023-09-22T23:15:07.724428893Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3696 {,""time":"msg":"2023-09-22T23:15:07.724438774Z",[2] Transition from WaitActive to WaitQuorum"","hostname":v":0","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",,""pidlevel""::475930}
3697 {"msg":"Finally, move the ReOpen job forward,""time":","v":0,2023-09-22T23:15:07.724463719Z"","namehostname":"":"crucible",ip-10-150-1-55.us-west-2.compute.internal"",level"":30pid":4759}
3698 {"msg":","time":"2023-09-22T23:15:07.724482559Z"[2] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active Active WaitQuorum ds_transition to Active",,""v"hostname:":"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pidlevel":":304759}
3699 {"msg":"Now ACK the repair job","v":0,","time":"name":"crucible2023-09-22T23:15:07.724503942Z"",","hostname"level":30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3700 {"msg,"":"time":"[2] Transition from WaitQuorum to Active2023-09-22T23:15:07.72452059Z"",,""v"hostname":":0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pidlevel":30":4759}
3701 {"msg":","time":"RE:0 Wait for result from reopen command 1003:4","v"2023-09-22T23:15:07.724541676Z":,"0,"hostname":"name":"crucible",ip-10-150-1-55.us-west-2.compute.internal"","levelpid":":304759}
3702 {"msg":","time":"98c53138-edfb-4bb6-9deb-ec8ddf307957 is now active with session: 9c9d5503-b2e8-4f16-b53b-34eee863d6b8","2023-09-22T23:15:07.724560813Zv":"0,","hostname":"name":"crucible",ip-10-150-1-55.us-west-2.compute.internal"","level":pid":304759}
3703 ,"time":"2023-09-22T23:15:07.724580917Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
37042023-09-22T23:15:07.724ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active Active Active ds_transition to Faulted
37052023-09-22T23:15:07.724ZINFOcrucible: [1] Transition from Active to Faulted
37062023-09-22T23:15:07.724ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active Faulted Active ds_transition to LiveRepairReady
37072023-09-22T23:15:07.724ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
37082023-09-22T23:15:07.724ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active LiveRepairReady Active ds_transition to LiveRepair
37092023-09-22T23:15:07.724ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
37102023-09-22T23:15:07.724ZINFOcrucible: Waiting for Close + ReOpen jobs
37112023-09-22T23:15:07.724ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
37122023-09-22T23:15:07.724ZINFOcrucible: RE:0 close id:1000 queued, notify DS
37132023-09-22T23:15:07.724ZINFOcrucible: RE:0 Wait for result from close command 1000:1
3714 test live_repair::repair_test::test_repair_extent_do_repair_all ... ok
37152023-09-22T23:15:07.725ZINFOcrucible: Now move the NoOp job forward
3716 {"msg":"Crucible stats registered with UUID: 04a09fc3-4f66-480e-84bb-fa73586a5f17","v":0,"name":"crucible","level":30{"msg":"[2] DS Reports error Err(GenericError(\"bad\","time":"2023-09-22T23:15:07.725457844Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"pid":4759},"
3717 v":0,"name":"crucible"{,"level":50"msg":"Crucible 04a09fc3-4f66-480e-84bb-fa73586a5f17 has session id: 3369a2bc-49b1-4d8e-9433-31405f84065e","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.725503723Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"time":","":"2023-09-22T23:15:07.725515587Z"downstairs","}hostname":"
3718 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3719 {{"msg":""[2] Reports error GenericError(msg\""bad\":"[0] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:15:07.725574325Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3720 ,"time":"{2023-09-22T23:15:07.725585649Z","hostname"":msg"":"ip-10-150-1-55.us-west-2.compute.internal"[0] Transition from New to WaitActive","pid",":v"4759:0,,""name"":":"downstairs"crucible"},"
3721 level":30{"msg":"[2] client skip 4 in process jobs because fault","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:15:07.725630552Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3722 {",msg":""time":"2023-09-22T23:15:07.725645931Z","hostname":[0] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) WaitActive New New ds_transition to WaitQuorum"","v":0ip-10-150-1-55.us-west-2.compute.internal",,""name":pid"":crucible4759",",level"""::"30downstairs"}
3723 {"msg":","time":"[2] changed 0 jobs to fault skipped","2023-09-22T23:15:07.725685063Z"v":,"0hostname":","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",,""level"pid"::475930}
3724 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":",crucible"","time":level":"302023-09-22T23:15:07.725713837Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":"downstairs"}
3725 ,"time":"2023-09-22T23:15:07.725735588Z",{"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg",":pid"":4759}
3726 [2] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) LiveRepair Active Active ds_transition to Faulted","v":0{,"name":"crucible"","msg":"level":30[0] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.725784375Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:15:07.725794267Z"pid",":hostname4759":"}ip-10-150-1-55.us-west-2.compute.internal
3727 ","pid":4759}
3728 {{"msg"":"msg":"[0] Transition from WaitQuorum to Active","v":0[2] Transition from Active to Faulted","name":","crucible"v",":level"0:30,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.725836172Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3729 {"msg":"[1] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active New New ds_transition to WaitActive","v":0,","time":"name":"crucible","2023-09-22T23:15:07.725840217Z"level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759},"
3730 time":"2023-09-22T23:15:07.72586166Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3731 "msg":"{"msg":"RE:0 Wait for result from reopen command 1003:4"[1] Transition from New to WaitActive",,""vv""::0,"0name":","crucible"name,""level":30:"crucible","level":30,"time":"2023-09-22T23:15:07.725896029Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3732 {"msg":"[1] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active WaitActive New ds_transition to WaitQuorum,"","v":0time",":name":""crucible","level":302023-09-22T23:15:07.725899713Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"time":}"
3733 2023-09-22T23:15:07.72592336Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
3734 "{msg":""msg":"[1] Transition from WaitActive to WaitQuorumExtent 0 close id:1003 Failed: Error: bad"",","v":0,"vname":""crucible":,"0level":,"30name":"crucible","level":50,"time":"2023-09-22T23:15:07.725958484Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3735 {"msg":"[1] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active WaitQuorum New ds_transition to Active","v":0,",name":""crucible",time":""level":302023-09-22T23:15:07.725962201Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}2023-09-22T23:15:07.725983135Z
3736 ","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759{}
3737 "msg"{:""msg":"[0] client skip 4 in process jobs because fault"[1] Transition from WaitQuorum to Active",","v":v"0:,"0name":,""namecrucible"":","crucible"level":,30"level":30,"time":"2023-09-22T23:15:07.726021675Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3738 {"msg":","[2] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active Active New ds_transition to WaitActive"time",:""v":0,"name":"2023-09-22T23:15:07.726023634Zcrucible"","level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,","time":"":"2023-09-22T23:15:07.726047685Zdownstairs"","}hostname":"
3739 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3740 {{"msg":""[2] Transition from New to WaitActivemsg"":,""v":0,"name":"crucible"[0] changed 0 jobs to fault skipped","level,":30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.726081863Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3741 {"msg":"[2] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.726087987Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:15:07.726102728Z"pid,""hostname":":4759ip-10-150-1-55.us-west-2.compute.internal","pid,"":"4759:"}
3742 downstairs"}{
3743 "msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible"{,"level":30"msg":","time":"[0] 58683b55-a732-4f77-bcc0-42761edb8654 (cac0d111-a31a-4543-a886-1fea695e5af3) LiveRepair Active Faulted ds_transition to Faulted"2023-09-22T23:15:07.72613854Z",,""vhostname"":":0ip-10-150-1-55.us-west-2.compute.internal",,""pidname":"":4759crucible"}
3744 ,"level"{:30"msg":"[2] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.726170694Z","hostname":",ip-10-150-1-55.us-west-2.compute.internal"","time"pid"::4759"}
3745 2023-09-22T23:15:07.726164127Z"{,""msg":"hostname":"[2] Transition from WaitQuorum to Active","v":0,"ip-10-150-1-55.us-west-2.compute.internal"name",:""cruciblepid"":,"4759level":30}
3746 ,"time":"2023-09-22T23:15:07.726200961Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3747 "msg"{:""msg":"[0] Transition from LiveRepair to Faulted","v":04a09fc3-4f66-480e-84bb-fa73586a5f17 is now active with session: b0b519cf-95b7-4a43-b79b-312a989a1592"0,","vname":":"0,"cruciblename"":","crucible","level"level"::3030,"time":"2023-09-22T23:15:07.726232678Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3748 {"msg":","time":[1] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active Active Active ds_transition to Faulted"","v":0,"name":"2023-09-22T23:15:07.726233471Zcrucible"","level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759},"
3749 time":"2023-09-22T23:15:07.72625829Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4759}
3750 "msg{":""msg":"RE:0 Bailing with error"[1] Transition from Active to Faulted",,""v":0v",":name"0:",crucible"","name"level":30:"crucible","level":40,"time":"2023-09-22T23:15:07.726293115Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3751 {"msg":"[1] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active Faulted Active ds_transition to LiveRepairReady","v":0,,""name":time""crucible":,""level":302023-09-22T23:15:07.726296494Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time":"}2023-09-22T23:15:07.72631923Z"
3752 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
37532023-09-22T23:15:07.726ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
37542023-09-22T23:15:07.726ZINFOcrucible: [1] 04a09fc3-4f66-480e-84bb-fa73586a5f17 (b0b519cf-95b7-4a43-b79b-312a989a1592) Active LiveRepairReady Active ds_transition to LiveRepair
37552023-09-22T23:15:07.726ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
3756 test live_repair::repair_test::test_repair_io_above_el_skipped ... ok
37572023-09-22T23:15:07.726ZINFOcrucible: Crucible stats registered with UUID: 73140564-9a43-4886-9974-6360c31b2d26
37582023-09-22T23:15:07.726ZINFOcrucible: Crucible 73140564-9a43-4886-9974-6360c31b2d26 has session id: 1dc8588b-8a0c-4272-939c-6e839c534254
37592023-09-22T23:15:07.726ZINFOcrucible: [0] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) New New New ds_transition to WaitActive
37602023-09-22T23:15:07.726ZINFOcrucible: [0] Transition from New to WaitActive
37612023-09-22T23:15:07.726ZINFOcrucible: [0] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) WaitActive New New ds_transition to WaitQuorum
37622023-09-22T23:15:07.726ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
37632023-09-22T23:15:07.726ZINFOcrucible: [0] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) WaitQuorum New New ds_transition to Active
37642023-09-22T23:15:07.727ZINFOcrucible: [0] Transition from WaitQuorum to Active
37652023-09-22T23:15:07.727ZINFOcrucible: [1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active New New ds_transition to WaitActive
37662023-09-22T23:15:07.727ZINFOcrucible: [1] Transition from New to WaitActive
3767 {"msg":"{"[1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active WaitActive New ds_transition to WaitQuorum"msg",:""v":0,"Crucible stats registered with UUID: f25cb6f0-d847-4044-b2ba-64db5febf8b4"name":","crucible"v":,"0level,""name":":crucible"30,"level":30,"time":"2023-09-22T23:15:07.727146463Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}time":"
3768 2023-09-22T23:15:07.727148512Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal"","msg"pid"::"4759}
3769 [1] Transition from WaitActive to WaitQuorum","v":0,{"name":""crucible"msg":","level":30Crucible f25cb6f0-d847-4044-b2ba-64db5febf8b4 has session id: 5b21f270-e95e-487f-a9e3-73e37056f04d","v":0,"name":"crucible","level":30,"time":","time"2023-09-22T23:15:07.727193284Z":","2023-09-22T23:15:07.727197716Z"hostname",":hostname"":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid":"4759,"}pid
3770 ":4759{}
3771 "msg":"{"msg":"[0] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30[1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible,""time":","2023-09-22T23:15:07.727233133Z"level",":hostname30":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3772 {"msg":"[0] Transition from New to WaitActive","v":0,"name":,""timecrucible"":,""level":302023-09-22T23:15:07.727246991Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,"4759time":"}
3773 2023-09-22T23:15:07.727262726Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pid"msg:"4759:"}
3774 [1] Transition from WaitQuorum to Active"{,"v""msg":":0,"name":"crucible","[0] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) WaitActive New New ds_transition to WaitQuorum"level,"":v":030,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.727298985Z",,""hostname":time"":"ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:15:07.727296987Z":4759,"}
3775 hostname":"{"msg":"ip-10-150-1-55.us-west-2.compute.internal",[0] Transition from WaitActive to WaitQuorum"","pidv":"0:,"4759name":"}
3776 crucible","level":30{"msg":","time":"2023-09-22T23:15:07.727332158Z","hostname[2] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active Active New ds_transition to WaitActive"":","v":0ip-10-150-1-55.us-west-2.compute.internal",","name"pid"::4759"}
3777 crucible",{"level"":msg30":"[0] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:15:07.727356259Z"time":,""2023-09-22T23:15:07.7273626Z"hostname",":hostname":""ip-10-150-1-55.us-west-2.compute.internal","pid":4759ip-10-150-1-55.us-west-2.compute.internal}"
3778 ,"pid":{4759}"
3779 msg":"[0] Transition from WaitQuorum to Active"{,"v":0,"name"":"msg"crucible":,""level":30[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:07.727403989Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3780 {"msg":","time":"[1] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active New New ds_transition to WaitActive"2023-09-22T23:15:07.727411123Z","v":0,,""name":"hostnamecrucible"",":level":"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3781 ,"time":"{2023-09-22T23:15:07.727434521Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3782 {"msg":"[2] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active Active WaitActive ds_transition to WaitQuorum"[1] Transition from New to WaitActive",",v"":v":00,",name":""crucible"name",":"level":30crucible","level":30,"time":"2023-09-22T23:15:07.727465462Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3783 ,"time{":""msg":"2023-09-22T23:15:07.727467682Z","hostname":"[1] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active WaitActive New ds_transition to WaitQuorum","v":0,"ip-10-150-1-55.us-west-2.compute.internal"name,":""pidcrucible"",":level":304759}
3784 {,"time":""msg":"2023-09-22T23:15:07.727494222Z","hostname":"[2] Transition from WaitActive to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal",,""pidv""::47590}
3785 ,"name{":""msgcrucible"":,""level":[1] Transition from WaitActive to WaitQuorum30","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:15:07.72752422Z"time",":hostname":""ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:15:07.727521341Z":4759,"}
3786 hostname":"{"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}[1] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active WaitQuorum New ds_transition to Active"
3787 ,"v":0,"name":"{crucible","level":30"msg":","time":"[2] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active Active WaitQuorum ds_transition to Active"2023-09-22T23:15:07.727555455Z,"","v"hostname":":0,"ip-10-150-1-55.us-west-2.compute.internal","namepid":"4759:"}
3788 crucible","{level":"30msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,,""time":time"":"2023-09-22T23:15:07.727584886Z","hostname":"2023-09-22T23:15:07.72758053Z","ip-10-150-1-55.us-west-2.compute.internal","hostname"pid:":"4759}
3789 ip-10-150-1-55.us-west-2.compute.internal","{pid":"4759msg":"}
3790 [2] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active Active New ds_transition to WaitActive","v":0,"{name":"crucible",""level":msg"30:"[2] Transition from WaitQuorum to Active","v":0,"name":","crucibletime":"","2023-09-22T23:15:07.727617799Z"level",":hostname":30"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3791 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible",","level":30time":"2023-09-22T23:15:07.72763104Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,""time":,""pid"2023-09-22T23:15:07.727643074Z":,"4759hostname":"}
3792 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}{
3793 "msg"{:""msg":"[2] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active Active WaitActive ds_transition to WaitQuorum73140564-9a43-4886-9974-6360c31b2d26 is now active with session: 521e1392-21bb-441e-9084-601140ec5551"",,""vv""::00,","name"name":":crucible"","level":30crucible","level":30,"time":"2023-09-22T23:15:07.727684506Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3794 ,"time{":""msg":"2023-09-22T23:15:07.727686836Z"[2] Transition from WaitActive to WaitQuorum",,""v":0hostname",":name":""crucible","level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3795 ,"time":"{2023-09-22T23:15:07.727709683Z","hostname"":msg"":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3796 [1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active Active Active ds_transition to Faulted","{v":0",msg":""name":"crucible","level":[2] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active Active WaitQuorum ds_transition to Active"30,"v":0,"name":"crucible","level":30,"time,":""time"2023-09-22T23:15:07.727741918Z":,""hostname":"2023-09-22T23:15:07.727739037Z"ip-10-150-1-55.us-west-2.compute.internal",,""pid":4759hostname"}
3797 :"{"msg"ip-10-150-1-55.us-west-2.compute.internal":","pid":[2] Transition from WaitQuorum to Active"4759,"v":0},"
3798 name":"crucible","level":30{"msg":"[1] Transition from Active to Faulted","v":0,",time":""name":"2023-09-22T23:15:07.727777087Z","crucible"hostname":","level"ip-10-150-1-55.us-west-2.compute.internal":,"30pid":4759}
3799 {"msg":"f25cb6f0-d847-4044-b2ba-64db5febf8b4 is now active with session: 3ec1c360-ac3c-465c-a512-4327d950daa8","v":0,"name":"crucible",,""level":time"30:"2023-09-22T23:15:07.727795582Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",time"":"pid":47592023-09-22T23:15:07.727809116Z","}hostname
3800 ":"ip-10-150-1-55.us-west-2.compute.internal","{pid":4759}
3801 "msg":"{"msg":"[1] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active Active Active ds_transition to Faulted","v":0,"[1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active Faulted Active ds_transition to LiveRepairReady"name":","crucible"v":,0","level":30name":"crucible","level":30,"time":"2023-09-22T23:15:07.727845154Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3802 ,{"time"":"msg":"[1] Transition from Active to Faulted"2023-09-22T23:15:07.727848551Z,""v":0,",name":""crucible","hostnamelevel":30":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3803 ,"time":"{2023-09-22T23:15:07.727873047Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
3804 [1] Transition from Faulted to LiveRepairReady"{,"v"":msg":0","name":"crucible","level":[1] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active Faulted Active ds_transition to LiveRepairReady"30,"v":0,"name":"crucible","level":30,"time":","2023-09-22T23:15:07.727903237Z"time,""hostname":":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:15:07.727900662Z4759"}
3805 ,"{hostname":""msg":"[1] Transition from Faulted to LiveRepairReady"ip-10-150-1-55.us-west-2.compute.internal",,""v":pid"0:,"4759name":"}crucible"
3806 ,"level":30{"msg":","time":"2023-09-22T23:15:07.727937863Z","hostname":"[1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active LiveRepairReady Active ds_transition to LiveRepair"ip-10-150-1-55.us-west-2.compute.internal",",pid":"4759v":}0
3807 ,"name{":""msg":"crucible","level":30[1] f25cb6f0-d847-4044-b2ba-64db5febf8b4 (3ec1c360-ac3c-465c-a512-4327d950daa8) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30,,""time":"time":"2023-09-22T23:15:07.727966384Z","hostname":"2023-09-22T23:15:07.727962103Z","ip-10-150-1-55.us-west-2.compute.internal",hostname""pid"::4759"}
3808 ip-10-150-1-55.us-west-2.compute.internal",{"pid""msg":":4759}[1] Transition from LiveRepairReady to LiveRepair"
3809 ,"v":0,"name":"{crucible","level":30"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,","nametime":"":"2023-09-22T23:15:07.727997453Z"crucible",",hostname":""level":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4759}
3810 {"msg":"Write to Extent 0:0:9 under repair","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:15:07.728013018Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:15:07.728022237Z",,""hostnamepid"":":4759ip-10-150-1-55.us-west-2.compute.internal","}pid
3811 {""msg":":4759Waiting for Close + ReOpen jobs"}
3812 ,"v":0,"name{":"crucible"","msg":level"":30Write to Extent 0:0:9 under repair","v":0,"name":"crucible","level":40,,""timetime""::""2023-09-22T23:15:07.728142241Z2023-09-22T23:15:07.728147415Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:":47594759}
3813 }
38142023-09-22T23:15:07.728ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
38152023-09-22T23:15:07.728ZINFOcrucible: RE:0 close id:1000 queued, notify DS
38162023-09-22T23:15:07.728ZINFOcrucible: RE:0 Wait for result from close command 1000:1
3817 test live_repair::repair_test::test_repair_io_at_el_sent ... ok
38182023-09-22T23:15:07.728ZINFOcrucible: Crucible stats registered with UUID: 1bd3598e-c0ae-461f-b737-8e54c980aeab
38192023-09-22T23:15:07.728ZINFOcrucible: Crucible 1bd3598e-c0ae-461f-b737-8e54c980aeab has session id: be7e3185-beed-4c0b-bc34-ecea7cde1037
38202023-09-22T23:15:07.728ZINFOcrucible: [0] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) New New New ds_transition to WaitActive
38212023-09-22T23:15:07.728ZINFOcrucible: [0] Transition from New to WaitActive
38222023-09-22T23:15:07.728ZINFOcrucible: [0] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) WaitActive New New ds_transition to WaitQuorum
38232023-09-22T23:15:07.728ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
38242023-09-22T23:15:07.728ZINFOcrucible: [0] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) WaitQuorum New New ds_transition to Active
38252023-09-22T23:15:07.728ZINFOcrucible: [0] Transition from WaitQuorum to Active
38262023-09-22T23:15:07.728ZINFOcrucible: [1] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active New New ds_transition to WaitActive
38272023-09-22T23:15:07.728ZINFOcrucible: [1] Transition from New to WaitActive
38282023-09-22T23:15:07.728ZINFOcrucible: [1] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active WaitActive New ds_transition to WaitQuorum
38292023-09-22T23:15:07.729ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
38302023-09-22T23:15:07.729ZINFOcrucible: [1] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active WaitQuorum New ds_transition to Active
38312023-09-22T23:15:07.729ZINFOcrucible: [1] Transition from WaitQuorum to Active
38322023-09-22T23:15:07.729ZINFOcrucible: [2] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active Active New ds_transition to WaitActive
38332023-09-22T23:15:07.729ZINFOcrucible: [2] Transition from New to WaitActive
38342023-09-22T23:15:07.729ZINFOcrucible: [2] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active Active WaitActive ds_transition to WaitQuorum
38352023-09-22T23:15:07.729ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
38362023-09-22T23:15:07.729ZINFOcrucible: [2] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active Active WaitQuorum ds_transition to Active
38372023-09-22T23:15:07.729ZINFOcrucible: [2] Transition from WaitQuorum to Active
38382023-09-22T23:15:07.729ZINFOcrucible: 1bd3598e-c0ae-461f-b737-8e54c980aeab is now active with session: c23143a7-8706-4bb2-a232-a549da748e27
38392023-09-22T23:15:07.729ZINFOcrucible: [1] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active Active Active ds_transition to Faulted
38402023-09-22T23:15:07.729ZINFOcrucible: [1] Transition from Active to Faulted
38412023-09-22T23:15:07.729ZINFOcrucible: [1] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active Faulted Active ds_transition to LiveRepairReady
38422023-09-22T23:15:07.729ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
38432023-09-22T23:15:07.729ZINFOcrucible: [1] 1bd3598e-c0ae-461f-b737-8e54c980aeab (c23143a7-8706-4bb2-a232-a549da748e27) Active LiveRepairReady Active ds_transition to LiveRepair
38442023-09-22T23:15:07.729ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
3845 test live_repair::repair_test::test_repair_io_below_el_sent ... ok
38462023-09-22T23:15:07.729ZINFOcrucible: Crucible stats registered with UUID: 630c7f91-b4d8-4905-b080-0d171cd9028f
38472023-09-22T23:15:07.729ZINFOcrucible: Crucible 630c7f91-b4d8-4905-b080-0d171cd9028f has session id: e3024dc0-53d3-4056-83d8-9a8fb9a977f8
38482023-09-22T23:15:07.729ZINFOcrucible: [0] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) New New New ds_transition to WaitActive
38492023-09-22T23:15:07.729ZINFOcrucible: [0] Transition from New to WaitActive
38502023-09-22T23:15:07.730ZINFOcrucible: [0] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) WaitActive New New ds_transition to WaitQuorum
38512023-09-22T23:15:07.730ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
38522023-09-22T23:15:07.730ZINFOcrucible: [0] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) WaitQuorum New New ds_transition to Active
38532023-09-22T23:15:07.730ZINFOcrucible: [0] Transition from WaitQuorum to Active
38542023-09-22T23:15:07.730ZINFOcrucible: [1] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active New New ds_transition to WaitActive
38552023-09-22T23:15:07.730ZINFOcrucible: [1] Transition from New to WaitActive
38562023-09-22T23:15:07.730ZINFOcrucible: [1] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active WaitActive New ds_transition to WaitQuorum
38572023-09-22T23:15:07.730ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
38582023-09-22T23:15:07.730ZINFOcrucible: [1] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active WaitQuorum New ds_transition to Active
38592023-09-22T23:15:07.730ZINFOcrucible: [1] Transition from WaitQuorum to Active
38602023-09-22T23:15:07.730ZINFOcrucible: [2] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active Active New ds_transition to WaitActive
38612023-09-22T23:15:07.730ZINFOcrucible: [2] Transition from New to WaitActive
38622023-09-22T23:15:07.730ZINFOcrucible: [2] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active Active WaitActive ds_transition to WaitQuorum
38632023-09-22T23:15:07.730ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
38642023-09-22T23:15:07.730ZINFOcrucible: [2] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active Active WaitQuorum ds_transition to Active
38652023-09-22T23:15:07.730ZINFOcrucible: [2] Transition from WaitQuorum to Active
38662023-09-22T23:15:07.730ZINFOcrucible: 630c7f91-b4d8-4905-b080-0d171cd9028f is now active with session: 71ef5e0f-f803-43f6-b820-271fc47e3485
38672023-09-22T23:15:07.730ZINFOcrucible: [1] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active Active Active ds_transition to Faulted
38682023-09-22T23:15:07.730ZINFOcrucible: [1] Transition from Active to Faulted
38692023-09-22T23:15:07.730ZINFOcrucible: [1] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active Faulted Active ds_transition to LiveRepairReady
38702023-09-22T23:15:07.730ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
38712023-09-22T23:15:07.730ZINFOcrucible: [1] 630c7f91-b4d8-4905-b080-0d171cd9028f (71ef5e0f-f803-43f6-b820-271fc47e3485) Active LiveRepairReady Active ds_transition to LiveRepair
38722023-09-22T23:15:07.730ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
3873 test live_repair::repair_test::test_repair_io_no_el_skipped ... ok
38742023-09-22T23:15:07.731ZINFOcrucible: Crucible stats registered with UUID: 6bffdf01-5cd2-4635-97e7-762d833316f9
38752023-09-22T23:15:07.731ZINFOcrucible: Crucible 6bffdf01-5cd2-4635-97e7-762d833316f9 has session id: 09cb16a5-bde7-4030-81f3-d987e91d8bcf
38762023-09-22T23:15:07.731ZINFOcrucible: [0] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) New New New ds_transition to WaitActive
38772023-09-22T23:15:07.731ZINFOcrucible: [0] Transition from New to WaitActive
38782023-09-22T23:15:07.731ZINFOcrucible: [0] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) WaitActive New New ds_transition to WaitQuorum
38792023-09-22T23:15:07.731ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
38802023-09-22T23:15:07.731ZINFOcrucible: [0] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) WaitQuorum New New ds_transition to Active
38812023-09-22T23:15:07.731ZINFOcrucible: [0] Transition from WaitQuorum to Active
38822023-09-22T23:15:07.731ZINFOcrucible: [1] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active New New ds_transition to WaitActive
38832023-09-22T23:15:07.731ZINFOcrucible: [1] Transition from New to WaitActive
38842023-09-22T23:15:07.731ZINFOcrucible: [1] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active WaitActive New ds_transition to WaitQuorum
38852023-09-22T23:15:07.731ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
38862023-09-22T23:15:07.731ZINFOcrucible: [1] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active WaitQuorum New ds_transition to Active
38872023-09-22T23:15:07.731ZINFOcrucible: [1] Transition from WaitQuorum to Active
38882023-09-22T23:15:07.731ZINFOcrucible: [2] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active Active New ds_transition to WaitActive
38892023-09-22T23:15:07.731ZINFOcrucible: [2] Transition from New to WaitActive
38902023-09-22T23:15:07.731ZINFOcrucible: [2] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active Active WaitActive ds_transition to WaitQuorum
38912023-09-22T23:15:07.731ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
38922023-09-22T23:15:07.731ZINFOcrucible: [2] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active Active WaitQuorum ds_transition to Active
38932023-09-22T23:15:07.731ZINFOcrucible: [2] Transition from WaitQuorum to Active
38942023-09-22T23:15:07.731ZINFOcrucible: 6bffdf01-5cd2-4635-97e7-762d833316f9 is now active with session: 48be3975-38f5-4ada-a823-b521356c3118
38952023-09-22T23:15:07.731ZINFOcrucible: [1] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active Active Active ds_transition to Faulted
38962023-09-22T23:15:07.731ZINFOcrucible: [1] Transition from Active to Faulted
38972023-09-22T23:15:07.731ZINFOcrucible: [1] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active Faulted Active ds_transition to LiveRepairReady
38982023-09-22T23:15:07.731ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
38992023-09-22T23:15:07.731ZINFOcrucible: [1] 6bffdf01-5cd2-4635-97e7-762d833316f9 (48be3975-38f5-4ada-a823-b521356c3118) Active LiveRepairReady Active ds_transition to LiveRepair
39002023-09-22T23:15:07.731ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
39012023-09-22T23:15:07.731ZWARNcrucible: Write to Extent 1:0:9 under repair
39022023-09-22T23:15:07.731ZWARNcrucible: Write to Extent 1:1:9 under repair
39032023-09-22T23:15:07.731ZWARNcrucible: Write to Extent 1:2:9 under repair
39042023-09-22T23:15:07.731ZWARNcrucible: Write 2:0:9 past extent under repair 1
39052023-09-22T23:15:07.731ZWARNcrucible: Write 2:1:9 past extent under repair 1
39062023-09-22T23:15:07.731ZWARNcrucible: Write 2:2:9 past extent under repair 1
39072023-09-22T23:15:07.731ZWARNcrucible: IO Write 1004 on eur 1 Added deps 2
39082023-09-22T23:15:07.731ZWARNcrucible: Create read repair deps for extent 2
39092023-09-22T23:15:07.731ZWARNcrucible: IO Read 1005 extent 1 added deps 2
39102023-09-22T23:15:07.731ZWARNcrucible: Write to Extent 1:0:9 under repair
39112023-09-22T23:15:07.731ZWARNcrucible: Write to Extent 1:1:9 under repair
39122023-09-22T23:15:07.731ZWARNcrucible: Write to Extent 1:2:9 under repair
39132023-09-22T23:15:07.731ZWARNcrucible: Write 2:0:9 past extent under repair 1
39142023-09-22T23:15:07.731ZWARNcrucible: Write 2:1:9 past extent under repair 1
39152023-09-22T23:15:07.731ZWARNcrucible: Write 2:2:9 past extent under repair 1
39162023-09-22T23:15:07.731ZWARNcrucible: IO Write 1006 on eur 1 Added deps 2
3917 test live_repair::repair_test::test_repair_io_span_el_sent ... ok
39182023-09-22T23:15:07.732ZINFOcrucible: Crucible stats registered with UUID: cde6561c-b94d-442b-83c1-194a7b340204
39192023-09-22T23:15:07.732ZINFOcrucible: Crucible cde6561c-b94d-442b-83c1-194a7b340204 has session id: 99f5768b-a5f4-486b-8bbc-25f1561f34f6
39202023-09-22T23:15:07.732ZINFOcrucible: [0] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) New New New ds_transition to WaitActive
39212023-09-22T23:15:07.732ZINFOcrucible: [0] Transition from New to WaitActive
39222023-09-22T23:15:07.732ZINFOcrucible: [0] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) WaitActive New New ds_transition to WaitQuorum
39232023-09-22T23:15:07.732ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
39242023-09-22T23:15:07.732ZINFOcrucible: [0] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) WaitQuorum New New ds_transition to Active
39252023-09-22T23:15:07.732ZINFOcrucible: [0] Transition from WaitQuorum to Active
39262023-09-22T23:15:07.732ZINFOcrucible: [1] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active New New ds_transition to WaitActive
39272023-09-22T23:15:07.732ZINFOcrucible: [1] Transition from New to WaitActive
39282023-09-22T23:15:07.732ZINFOcrucible: [1] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active WaitActive New ds_transition to WaitQuorum
39292023-09-22T23:15:07.732ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
39302023-09-22T23:15:07.732ZINFOcrucible: [1] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active WaitQuorum New ds_transition to Active
39312023-09-22T23:15:07.732ZINFOcrucible: [1] Transition from WaitQuorum to Active
39322023-09-22T23:15:07.732ZINFOcrucible: [2] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active Active New ds_transition to WaitActive
39332023-09-22T23:15:07.732ZINFOcrucible: [2] Transition from New to WaitActive
39342023-09-22T23:15:07.732ZINFOcrucible: [2] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active Active WaitActive ds_transition to WaitQuorum
39352023-09-22T23:15:07.732ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
39362023-09-22T23:15:07.732ZINFOcrucible: [2] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active Active WaitQuorum ds_transition to Active
39372023-09-22T23:15:07.732ZINFOcrucible: [2] Transition from WaitQuorum to Active
39382023-09-22T23:15:07.732ZINFOcrucible: cde6561c-b94d-442b-83c1-194a7b340204 is now active with session: 70ef231b-de88-4e90-a397-be55ff1dea6d
39392023-09-22T23:15:07.732ZINFOcrucible: [1] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active Active Active ds_transition to Faulted
39402023-09-22T23:15:07.732ZINFOcrucible: [1] Transition from Active to Faulted
39412023-09-22T23:15:07.732ZINFOcrucible: [1] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active Faulted Active ds_transition to LiveRepairReady
39422023-09-22T23:15:07.732ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
39432023-09-22T23:15:07.732ZINFOcrucible: [1] cde6561c-b94d-442b-83c1-194a7b340204 (70ef231b-de88-4e90-a397-be55ff1dea6d) Active LiveRepairReady Active ds_transition to LiveRepair
39442023-09-22T23:15:07.732ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
39452023-09-22T23:15:07.732ZWARNcrucible: Create read repair deps for extent 2
39462023-09-22T23:15:07.733ZWARNcrucible: IO Read 1004 extent 1 added deps 2
3947 test live_repair::repair_test::test_repair_read_span_el_sent ... ok
39482023-09-22T23:15:07.733ZINFOcrucible: Crucible stats registered with UUID: ac10043e-f771-4b20-9401-d688ee9fd44c
39492023-09-22T23:15:07.733ZINFOcrucible: Crucible ac10043e-f771-4b20-9401-d688ee9fd44c has session id: 20e39922-f402-4d04-b0fd-13c2207ef5b6
39502023-09-22T23:15:07.733ZINFOcrucible: [0] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) New New New ds_transition to WaitActive
39512023-09-22T23:15:07.733ZINFOcrucible: [0] Transition from New to WaitActive
39522023-09-22T23:15:07.733ZINFOcrucible: [0] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) WaitActive New New ds_transition to WaitQuorum
39532023-09-22T23:15:07.733ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
39542023-09-22T23:15:07.733ZINFOcrucible: [0] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) WaitQuorum New New ds_transition to Active
39552023-09-22T23:15:07.733ZINFOcrucible: [0] Transition from WaitQuorum to Active
39562023-09-22T23:15:07.733ZINFOcrucible: [1] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active New New ds_transition to WaitActive
39572023-09-22T23:15:07.733ZINFOcrucible: [1] Transition from New to WaitActive
39582023-09-22T23:15:07.733ZINFOcrucible: [1] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active WaitActive New ds_transition to WaitQuorum
39592023-09-22T23:15:07.733ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
39602023-09-22T23:15:07.733ZINFOcrucible: [1] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active WaitQuorum New ds_transition to Active
39612023-09-22T23:15:07.733ZINFOcrucible: [1] Transition from WaitQuorum to Active
39622023-09-22T23:15:07.733ZINFOcrucible: [2] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active Active New ds_transition to WaitActive
39632023-09-22T23:15:07.733ZINFOcrucible: [2] Transition from New to WaitActive
39642023-09-22T23:15:07.733ZINFOcrucible: [2] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active Active WaitActive ds_transition to WaitQuorum
39652023-09-22T23:15:07.733ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
39662023-09-22T23:15:07.733ZINFOcrucible: [2] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active Active WaitQuorum ds_transition to Active
39672023-09-22T23:15:07.734ZINFOcrucible: [2] Transition from WaitQuorum to Active
39682023-09-22T23:15:07.734ZINFOcrucible: ac10043e-f771-4b20-9401-d688ee9fd44c is now active with session: ed0be356-db20-44b7-8af6-f9591507659c
39692023-09-22T23:15:07.734ZINFOcrucible: [1] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active Active Active ds_transition to Faulted
39702023-09-22T23:15:07.734ZINFOcrucible: [1] Transition from Active to Faulted
39712023-09-22T23:15:07.734ZINFOcrucible: [1] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active Faulted Active ds_transition to LiveRepairReady
39722023-09-22T23:15:07.734ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
39732023-09-22T23:15:07.734ZINFOcrucible: [1] ac10043e-f771-4b20-9401-d688ee9fd44c (ed0be356-db20-44b7-8af6-f9591507659c) Active LiveRepairReady Active ds_transition to LiveRepair
39742023-09-22T23:15:07.734ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
39752023-09-22T23:15:07.734ZWARNcrucible: Write to Extent 1:0:9 under repair
39762023-09-22T23:15:07.734ZWARNcrucible: Write to Extent 1:1:9 under repair
39772023-09-22T23:15:07.734ZWARNcrucible: Write to Extent 1:2:9 under repair
39782023-09-22T23:15:07.734ZWARNcrucible: Write 2:0:9 past extent under repair 1
39792023-09-22T23:15:07.734ZWARNcrucible: Write 2:1:9 past extent under repair 1
39802023-09-22T23:15:07.734ZWARNcrucible: Write 2:2:9 past extent under repair 1
39812023-09-22T23:15:07.734ZWARNcrucible: IO Write 1004 on eur 1 Added deps 2
3982 test live_repair::repair_test::test_repair_write_span_el_sent ... ok
39832023-09-22T23:15:07.734ZINFOcrucible: Crucible stats registered with UUID: 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e
39842023-09-22T23:15:07.734ZINFOcrucible: Crucible 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e has session id: 227380e3-74cf-458b-8f99-cd6384b7ccf3
39852023-09-22T23:15:07.734ZINFOcrucible: [0] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) New New New ds_transition to WaitActive
39862023-09-22T23:15:07.734ZINFOcrucible: [0] Transition from New to WaitActive
39872023-09-22T23:15:07.734ZINFOcrucible: [0] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) WaitActive New New ds_transition to WaitQuorum
39882023-09-22T23:15:07.734ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
39892023-09-22T23:15:07.735ZINFOcrucible: [0] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) WaitQuorum New New ds_transition to Active
39902023-09-22T23:15:07.735ZINFOcrucible: [0] Transition from WaitQuorum to Active
39912023-09-22T23:15:07.735ZINFOcrucible: [1] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active New New ds_transition to WaitActive
39922023-09-22T23:15:07.735ZINFOcrucible: [1] Transition from New to WaitActive
39932023-09-22T23:15:07.735ZINFOcrucible: [1] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active WaitActive New ds_transition to WaitQuorum
39942023-09-22T23:15:07.735ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
39952023-09-22T23:15:07.735ZINFOcrucible: [1] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active WaitQuorum New ds_transition to Active
39962023-09-22T23:15:07.735ZINFOcrucible: [1] Transition from WaitQuorum to Active
39972023-09-22T23:15:07.735ZINFOcrucible: [2] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active Active New ds_transition to WaitActive
39982023-09-22T23:15:07.735ZINFOcrucible: [2] Transition from New to WaitActive
39992023-09-22T23:15:07.735ZINFOcrucible: [2] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active Active WaitActive ds_transition to WaitQuorum
40002023-09-22T23:15:07.735ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
40012023-09-22T23:15:07.735ZINFOcrucible: [2] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active Active WaitQuorum ds_transition to Active
40022023-09-22T23:15:07.735ZINFOcrucible: [2] Transition from WaitQuorum to Active
40032023-09-22T23:15:07.735ZINFOcrucible: 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e is now active with session: e660d75e-dc92-48f6-9e7d-44cbb6b32053
40042023-09-22T23:15:07.735ZINFOcrucible: [1] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active Active Active ds_transition to Faulted
40052023-09-22T23:15:07.735ZINFOcrucible: [1] Transition from Active to Faulted
40062023-09-22T23:15:07.735ZINFOcrucible: [1] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active Faulted Active ds_transition to LiveRepairReady
40072023-09-22T23:15:07.735ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
40082023-09-22T23:15:07.735ZINFOcrucible: [1] 7f6c56b0-84fc-4f4e-ad81-9acd25765e9e (e660d75e-dc92-48f6-9e7d-44cbb6b32053) Active LiveRepairReady Active ds_transition to LiveRepair
40092023-09-22T23:15:07.735ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
40102023-09-22T23:15:07.735ZWARNcrucible: Write to Extent 0:0:9 under repair
40112023-09-22T23:15:07.735ZWARNcrucible: Write to Extent 0:1:9 under repair
40122023-09-22T23:15:07.735ZWARNcrucible: Write to Extent 0:2:9 under repair
40132023-09-22T23:15:07.735ZWARNcrucible: Write 1:0:9 past extent under repair 0
40142023-09-22T23:15:07.735ZWARNcrucible: Write 1:1:9 past extent under repair 0
40152023-09-22T23:15:07.735ZWARNcrucible: Write 1:2:9 past extent under repair 0
40162023-09-22T23:15:07.735ZWARNcrucible: Write 2:0:9 past extent under repair 0
40172023-09-22T23:15:07.735ZWARNcrucible: Write 2:1:9 past extent under repair 0
40182023-09-22T23:15:07.735ZWARNcrucible: Write 2:2:9 past extent under repair 0
40192023-09-22T23:15:07.735ZWARNcrucible: IO Write 1008 on eur 0 Added deps 1
40202023-09-22T23:15:07.735ZWARNcrucible: IO Write 1008 on eur 0 Added deps 2
4021 test live_repair::repair_test::test_repair_write_span_two_el_sent ... ok
40222023-09-22T23:15:07.736ZINFOcrucible: Crucible stats registered with UUID: 797301b9-b7b2-43e8-990e-87a8ef0b3065
40232023-09-22T23:15:07.736ZINFOcrucible: Crucible 797301b9-b7b2-43e8-990e-87a8ef0b3065 has session id: a65dfd88-a5bf-42eb-ac78-f30129ced51d
40242023-09-22T23:15:07.736ZINFOcrucible: [0] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) New New New ds_transition to WaitActive
40252023-09-22T23:15:07.736ZINFOcrucible: [0] Transition from New to WaitActive
40262023-09-22T23:15:07.736ZINFOcrucible: [0] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) WaitActive New New ds_transition to WaitQuorum
40272023-09-22T23:15:07.736ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
40282023-09-22T23:15:07.736ZINFOcrucible: [0] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) WaitQuorum New New ds_transition to Active
40292023-09-22T23:15:07.736ZINFOcrucible: [0] Transition from WaitQuorum to Active
40302023-09-22T23:15:07.736ZINFOcrucible: [1] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active New New ds_transition to WaitActive
40312023-09-22T23:15:07.736ZINFOcrucible: [1] Transition from New to WaitActive
40322023-09-22T23:15:07.736ZINFOcrucible: [1] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active WaitActive New ds_transition to WaitQuorum
40332023-09-22T23:15:07.736ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
40342023-09-22T23:15:07.736ZINFOcrucible: [1] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active WaitQuorum New ds_transition to Active
40352023-09-22T23:15:07.736ZINFOcrucible: [1] Transition from WaitQuorum to Active
40362023-09-22T23:15:07.736ZINFOcrucible: [2] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active Active New ds_transition to WaitActive
40372023-09-22T23:15:07.736ZINFOcrucible: [2] Transition from New to WaitActive
40382023-09-22T23:15:07.736ZINFOcrucible: [2] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active Active WaitActive ds_transition to WaitQuorum
40392023-09-22T23:15:07.736ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
40402023-09-22T23:15:07.736ZINFOcrucible: [2] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active Active WaitQuorum ds_transition to Active
40412023-09-22T23:15:07.736ZINFOcrucible: [2] Transition from WaitQuorum to Active
40422023-09-22T23:15:07.736ZINFOcrucible: 797301b9-b7b2-43e8-990e-87a8ef0b3065 is now active with session: 01780b64-54c6-47e3-9047-57d69957a1c2
40432023-09-22T23:15:07.736ZINFOcrucible: [1] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active Active Active ds_transition to Faulted
40442023-09-22T23:15:07.736ZINFOcrucible: [1] Transition from Active to Faulted
40452023-09-22T23:15:07.736ZINFOcrucible: [1] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active Faulted Active ds_transition to LiveRepairReady
40462023-09-22T23:15:07.736ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
40472023-09-22T23:15:07.736ZINFOcrucible: [1] 797301b9-b7b2-43e8-990e-87a8ef0b3065 (01780b64-54c6-47e3-9047-57d69957a1c2) Active LiveRepairReady Active ds_transition to LiveRepair
40482023-09-22T23:15:07.736ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
4049 test live_repair::repair_test::test_reserve_extent_repair_ids ... ok
4050 Testing repair with s:0 r:[ClientId(1)]
4051 Sep 22 23:15:07.737 DEBG Get repair info for 0 source, : downstairs
4052 Sep 22 23:15:07.737 DEBG Get repair info for 1 bad, : downstairs
4053 Sep 22 23:15:07.737 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4054 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4055 Testing repair with s:0 r:[ClientId(2)]
4056 Sep 22 23:15:07.737 DEBG Get repair info for 0 source, : downstairs
4057 Sep 22 23:15:07.737 DEBG Get repair info for 2 bad, : downstairs
4058 Sep 22 23:15:07.737 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4059 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4060 Testing repair with s:1 r:[ClientId(0)]
4061 Sep 22 23:15:07.737 DEBG Get repair info for 1 source, : downstairs
4062 Sep 22 23:15:07.737 DEBG Get repair info for 0 bad, : downstairs
4063 Sep 22 23:15:07.737 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4064 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4065 Testing repair with s:1 r:[ClientId(2)]
4066 Sep 22 23:15:07.737 DEBG Get repair info for 1 source, : downstairs
4067 Sep 22 23:15:07.737 DEBG Get repair info for 2 bad, : downstairs
4068 Sep 22 23:15:07.737 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4069 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4070 Testing repair with s:2 r:[ClientId(0)]
4071 Sep 22 23:15:07.737 DEBG Get repair info for 2 source, : downstairs
4072 Sep 22 23:15:07.737 DEBG Get repair info for 0 bad, : downstairs
4073 Sep 22 23:15:07.737 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4074 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4075 Testing repair with s:2 r:[ClientId(1)]
4076 Sep 22 23:15:07.737 DEBG Get repair info for 2 source, : downstairs
4077 Sep 22 23:15:07.737 DEBG Get repair info for 1 bad, : downstairs
4078 Sep 22 23:15:07.737 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4079 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4080 test live_repair::repair_test::test_solver_dirty_needs_repair_one ... ok
4081 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4082 Sep 22 23:15:07.737 DEBG Get repair info for 0 source, : downstairs
4083 Sep 22 23:15:07.737 DEBG Get repair info for 1 bad, : downstairs
4084 Sep 22 23:15:07.737 DEBG Get repair info for 2 bad, : downstairs
4085 Sep 22 23:15:07.737 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4086 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4087 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4088 Sep 22 23:15:07.737 DEBG Get repair info for 1 source, : downstairs
4089 Sep 22 23:15:07.737 DEBG Get repair info for 0 bad, : downstairs
4090 Sep 22 23:15:07.737 DEBG Get repair info for 2 bad, : downstairs
4091 Sep 22 23:15:07.737 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4092 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4093 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4094 Sep 22 23:15:07.737 DEBG Get repair info for 2 source, : downstairs
4095 Sep 22 23:15:07.737 DEBG Get repair info for 0 bad, : downstairs
4096 Sep 22 23:15:07.737 DEBG Get repair info for 1 bad, : downstairs
4097 Sep 22 23:15:07.737 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4098 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4099 test live_repair::repair_test::test_solver_dirty_needs_repair_two ... ok
4100 Testing repair with s:0 r:[ClientId(1)]
4101 Sep 22 23:15:07.738 DEBG Get repair info for 0 source, : downstairs
4102 Sep 22 23:15:07.738 DEBG Get repair info for 1 bad, : downstairs
4103 Sep 22 23:15:07.738 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4104 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4105 Testing repair with s:0 r:[ClientId(2)]
4106 Sep 22 23:15:07.738 DEBG Get repair info for 0 source, : downstairs
4107 Sep 22 23:15:07.738 DEBG Get repair info for 2 bad, : downstairs
4108 Sep 22 23:15:07.738 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4109 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4110 Testing repair with s:1 r:[ClientId(0)]
4111 Sep 22 23:15:07.738 DEBG Get repair info for 1 source, : downstairs
4112 Sep 22 23:15:07.738 DEBG Get repair info for 0 bad, : downstairs
4113 Sep 22 23:15:07.738 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4114 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4115 Testing repair with s:1 r:[ClientId(2)]
4116 Sep 22 23:15:07.738 DEBG Get repair info for 1 source, : downstairs
4117 Sep 22 23:15:07.738 DEBG Get repair info for 2 bad, : downstairs
4118 Sep 22 23:15:07.738 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4119 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4120 Testing repair with s:2 r:[ClientId(0)]
4121 Sep 22 23:15:07.738 DEBG Get repair info for 2 source, : downstairs
4122 Sep 22 23:15:07.738 DEBG Get repair info for 0 bad, : downstairs
4123 Sep 22 23:15:07.738 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4124 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4125 Testing repair with s:2 r:[ClientId(1)]
4126 Sep 22 23:15:07.738 DEBG Get repair info for 2 source, : downstairs
4127 Sep 22 23:15:07.738 DEBG Get repair info for 1 bad, : downstairs
4128 Sep 22 23:15:07.738 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4129 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4130 test live_repair::repair_test::test_solver_flush_higher_needs_repair_one ... ok
4131 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4132 Sep 22 23:15:07.738 DEBG Get repair info for 0 source, : downstairs
4133 Sep 22 23:15:07.738 DEBG Get repair info for 1 bad, : downstairs
4134 Sep 22 23:15:07.738 DEBG Get repair info for 2 bad, : downstairs
4135 Sep 22 23:15:07.738 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4136 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4137 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4138 Sep 22 23:15:07.738 DEBG Get repair info for 1 source, : downstairs
4139 Sep 22 23:15:07.738 DEBG Get repair info for 0 bad, : downstairs
4140 Sep 22 23:15:07.738 DEBG Get repair info for 2 bad, : downstairs
4141 Sep 22 23:15:07.738 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4142 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4143 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4144 Sep 22 23:15:07.738 DEBG Get repair info for 2 source, : downstairs
4145 Sep 22 23:15:07.738 DEBG Get repair info for 0 bad, : downstairs
4146 Sep 22 23:15:07.738 DEBG Get repair info for 1 bad, : downstairs
4147 Sep 22 23:15:07.738 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4148 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4149 test live_repair::repair_test::test_solver_flush_higher_needs_repair_two ... ok
4150 Testing repair with s:0 r:[ClientId(1)]
4151 Sep 22 23:15:07.739 DEBG Get repair info for 0 source, : downstairs
4152 Sep 22 23:15:07.739 DEBG Get repair info for 1 bad, : downstairs
4153 Sep 22 23:15:07.739 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4154 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4155 Testing repair with s:0 r:[ClientId(2)]
4156 Sep 22 23:15:07.739 DEBG Get repair info for 0 source, : downstairs
4157 Sep 22 23:15:07.739 DEBG Get repair info for 2 bad, : downstairs
4158 Sep 22 23:15:07.739 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4159 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4160 Testing repair with s:1 r:[ClientId(0)]
4161 Sep 22 23:15:07.739 DEBG Get repair info for 1 source, : downstairs
4162 Sep 22 23:15:07.739 DEBG Get repair info for 0 bad, : downstairs
4163 Sep 22 23:15:07.739 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4164 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4165 Testing repair with s:1 r:[ClientId(2)]
4166 Sep 22 23:15:07.739 DEBG Get repair info for 1 source, : downstairs
4167 Sep 22 23:15:07.739 DEBG Get repair info for 2 bad, : downstairs
4168 Sep 22 23:15:07.739 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4169 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4170 Testing repair with s:2 r:[ClientId(0)]
4171 Sep 22 23:15:07.739 DEBG Get repair info for 2 source, : downstairs
4172 Sep 22 23:15:07.739 DEBG Get repair info for 0 bad, : downstairs
4173 Sep 22 23:15:07.739 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4174 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4175 Testing repair with s:2 r:[ClientId(1)]
4176 Sep 22 23:15:07.739 DEBG Get repair info for 2 source, : downstairs
4177 Sep 22 23:15:07.739 DEBG Get repair info for 1 bad, : downstairs
4178 Sep 22 23:15:07.739 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4179 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4180 test live_repair::repair_test::test_solver_flush_lower_needs_repair_one ... ok
4181 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4182 Sep 22 23:15:07.739 DEBG Get repair info for 0 source, : downstairs
4183 Sep 22 23:15:07.739 DEBG Get repair info for 1 bad, : downstairs
4184 Sep 22 23:15:07.739 DEBG Get repair info for 2 bad, : downstairs
4185 Sep 22 23:15:07.739 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4186 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4187 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4188 Sep 22 23:15:07.739 DEBG Get repair info for 1 source, : downstairs
4189 Sep 22 23:15:07.739 DEBG Get repair info for 0 bad, : downstairs
4190 Sep 22 23:15:07.739 DEBG Get repair info for 2 bad, : downstairs
4191 Sep 22 23:15:07.739 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4192 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4193 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4194 Sep 22 23:15:07.739 DEBG Get repair info for 2 source, : downstairs
4195 Sep 22 23:15:07.739 DEBG Get repair info for 0 bad, : downstairs
4196 Sep 22 23:15:07.739 DEBG Get repair info for 1 bad, : downstairs
4197 Sep 22 23:15:07.740 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4198 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4199 test live_repair::repair_test::test_solver_flush_lower_needs_repair_two ... ok
4200 Testing repair with s:0 r:[ClientId(1)]
4201 Sep 22 23:15:07.740 DEBG Get repair info for 0 source, : downstairs
4202 Sep 22 23:15:07.740 DEBG Get repair info for 1 bad, : downstairs
4203 Sep 22 23:15:07.740 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4204 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4205 Testing repair with s:0 r:[ClientId(2)]
4206 Sep 22 23:15:07.740 DEBG Get repair info for 0 source, : downstairs
4207 Sep 22 23:15:07.740 DEBG Get repair info for 2 bad, : downstairs
4208 Sep 22 23:15:07.740 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4209 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4210 Testing repair with s:1 r:[ClientId(0)]
4211 Sep 22 23:15:07.740 DEBG Get repair info for 1 source, : downstairs
4212 Sep 22 23:15:07.740 DEBG Get repair info for 0 bad, : downstairs
4213 Sep 22 23:15:07.740 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4214 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4215 Testing repair with s:1 r:[ClientId(2)]
4216 Sep 22 23:15:07.740 DEBG Get repair info for 1 source, : downstairs
4217 Sep 22 23:15:07.740 DEBG Get repair info for 2 bad, : downstairs
4218 Sep 22 23:15:07.740 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4219 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4220 Testing repair with s:2 r:[ClientId(0)]
4221 Sep 22 23:15:07.740 DEBG Get repair info for 2 source, : downstairs
4222 Sep 22 23:15:07.740 DEBG Get repair info for 0 bad, : downstairs
4223 Sep 22 23:15:07.740 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4224 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4225 Testing repair with s:2 r:[ClientId(1)]
4226 Sep 22 23:15:07.740 DEBG Get repair info for 2 source, : downstairs
4227 Sep 22 23:15:07.740 DEBG Get repair info for 1 bad, : downstairs
4228 Sep 22 23:15:07.740 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4229 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4230 test live_repair::repair_test::test_solver_gen_higher_needs_repair_one ... ok
4231 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4232 Sep 22 23:15:07.740 DEBG Get repair info for 0 source, : downstairs
4233 Sep 22 23:15:07.740 DEBG Get repair info for 1 bad, : downstairs
4234 Sep 22 23:15:07.740 DEBG Get repair info for 2 bad, : downstairs
4235 Sep 22 23:15:07.740 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4236 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4237 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4238 Sep 22 23:15:07.740 DEBG Get repair info for 1 source, : downstairs
4239 Sep 22 23:15:07.740 DEBG Get repair info for 0 bad, : downstairs
4240 Sep 22 23:15:07.740 DEBG Get repair info for 2 bad, : downstairs
4241 Sep 22 23:15:07.740 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4242 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4243 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4244 Sep 22 23:15:07.740 DEBG Get repair info for 2 source, : downstairs
4245 Sep 22 23:15:07.740 DEBG Get repair info for 0 bad, : downstairs
4246 Sep 22 23:15:07.740 DEBG Get repair info for 1 bad, : downstairs
4247 Sep 22 23:15:07.740 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4248 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4249 test live_repair::repair_test::test_solver_gen_higher_needs_repair_two ... ok
4250 Testing repair with s:0 r:[ClientId(1)]
4251 Sep 22 23:15:07.741 DEBG Get repair info for 0 source, : downstairs
4252 Sep 22 23:15:07.741 DEBG Get repair info for 1 bad, : downstairs
4253 Sep 22 23:15:07.741 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4254 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4255 Testing repair with s:0 r:[ClientId(2)]
4256 Sep 22 23:15:07.741 DEBG Get repair info for 0 source, : downstairs
4257 Sep 22 23:15:07.741 DEBG Get repair info for 2 bad, : downstairs
4258 Sep 22 23:15:07.741 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4259 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4260 Testing repair with s:1 r:[ClientId(0)]
4261 Sep 22 23:15:07.741 DEBG Get repair info for 1 source, : downstairs
4262 Sep 22 23:15:07.741 DEBG Get repair info for 0 bad, : downstairs
4263 Sep 22 23:15:07.741 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4264 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4265 Testing repair with s:1 r:[ClientId(2)]
4266 Sep 22 23:15:07.741 DEBG Get repair info for 1 source, : downstairs
4267 Sep 22 23:15:07.741 DEBG Get repair info for 2 bad, : downstairs
4268 Sep 22 23:15:07.741 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4269 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4270 Testing repair with s:2 r:[ClientId(0)]
4271 Sep 22 23:15:07.741 DEBG Get repair info for 2 source, : downstairs
4272 Sep 22 23:15:07.741 DEBG Get repair info for 0 bad, : downstairs
4273 Sep 22 23:15:07.741 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4274 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4275 Testing repair with s:2 r:[ClientId(1)]
4276 Sep 22 23:15:07.741 DEBG Get repair info for 2 source, : downstairs
4277 Sep 22 23:15:07.741 DEBG Get repair info for 1 bad, : downstairs
4278 Sep 22 23:15:07.741 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4279 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4280 test live_repair::repair_test::test_solver_gen_lower_needs_repair_one ... ok
4281 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4282 Sep 22 23:15:07.741 DEBG Get repair info for 0 source, : downstairs
4283 Sep 22 23:15:07.741 DEBG Get repair info for 1 bad, : downstairs
4284 Sep 22 23:15:07.741 DEBG Get repair info for 2 bad, : downstairs
4285 Sep 22 23:15:07.741 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4286 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4287 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4288 Sep 22 23:15:07.741 DEBG Get repair info for 1 source, : downstairs
4289 Sep 22 23:15:07.741 DEBG Get repair info for 0 bad, : downstairs
4290 Sep 22 23:15:07.741 DEBG Get repair info for 2 bad, : downstairs
4291 Sep 22 23:15:07.741 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4292 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4293 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4294 Sep 22 23:15:07.741 DEBG Get repair info for 2 source, : downstairs
4295 Sep 22 23:15:07.741 DEBG Get repair info for 0 bad, : downstairs
4296 Sep 22 23:15:07.741 DEBG Get repair info for 1 bad, : downstairs
4297 Sep 22 23:15:07.741 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4298 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4299 test live_repair::repair_test::test_solver_gen_lower_needs_repair_two ... ok
4300 Sep 22 23:15:07.742 DEBG Get repair info for 0 source, : downstairs
4301 Sep 22 23:15:07.742 DEBG Get repair info for 1 bad, : downstairs
4302 Sep 22 23:15:07.742 DEBG Get repair info for 2 bad, : downstairs
4303 Sep 22 23:15:07.742 INFO No repair needed for extent 0, : downstairs
4304 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveNoOp { dependencies: [] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4305 Passed for source 0
4306 Sep 22 23:15:07.742 DEBG Get repair info for 1 source, : downstairs
4307 Sep 22 23:15:07.742 DEBG Get repair info for 0 bad, : downstairs
4308 Sep 22 23:15:07.742 DEBG Get repair info for 2 bad, : downstairs
4309 Sep 22 23:15:07.742 INFO No repair needed for extent 0, : downstairs
4310 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveNoOp { dependencies: [] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4311 Passed for source 1
4312 Sep 22 23:15:07.742 DEBG Get repair info for 2 source, : downstairs
4313 Sep 22 23:15:07.742 DEBG Get repair info for 0 bad, : downstairs
4314 Sep 22 23:15:07.742 DEBG Get repair info for 1 bad, : downstairs
4315 Sep 22 23:15:07.742 INFO No repair needed for extent 0, : downstairs
4316 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveNoOp { dependencies: [] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4317 Passed for source 2
4318 test live_repair::repair_test::test_solver_no_work ... ok
43192023-09-22T23:15:07.742ZINFOcrucible: Crucible stats registered with UUID: 6909e08a-657e-411e-b860-976a08df7f3b
43202023-09-22T23:15:07.742ZINFOcrucible: Crucible 6909e08a-657e-411e-b860-976a08df7f3b has session id: 76d7d52d-b000-4099-8d66-fc470c98c52f
43212023-09-22T23:15:07.742ZINFOcrucible: [0] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) New New New ds_transition to WaitActive
43222023-09-22T23:15:07.742ZINFOcrucible: [0] Transition from New to WaitActive
43232023-09-22T23:15:07.743ZINFOcrucible: [0] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) WaitActive New New ds_transition to WaitQuorum
43242023-09-22T23:15:07.743ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
43252023-09-22T23:15:07.743ZINFOcrucible: [0] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) WaitQuorum New New ds_transition to Active
43262023-09-22T23:15:07.743ZINFOcrucible: [0] Transition from WaitQuorum to Active
43272023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active New New ds_transition to WaitActive
43282023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from New to WaitActive
43292023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active WaitActive New ds_transition to WaitQuorum
43302023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
43312023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active WaitQuorum New ds_transition to Active
43322023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from WaitQuorum to Active
43332023-09-22T23:15:07.743ZINFOcrucible: [2] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active Active New ds_transition to WaitActive
43342023-09-22T23:15:07.743ZINFOcrucible: [2] Transition from New to WaitActive
43352023-09-22T23:15:07.743ZINFOcrucible: [2] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active Active WaitActive ds_transition to WaitQuorum
43362023-09-22T23:15:07.743ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
43372023-09-22T23:15:07.743ZINFOcrucible: [2] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active Active WaitQuorum ds_transition to Active
43382023-09-22T23:15:07.743ZINFOcrucible: [2] Transition from WaitQuorum to Active
43392023-09-22T23:15:07.743ZINFOcrucible: 6909e08a-657e-411e-b860-976a08df7f3b is now active with session: 9af93bfb-1d45-4033-8c5d-51c86e95e775
43402023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active Active Active ds_transition to Faulted
43412023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from Active to Faulted
43422023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active Faulted Active ds_transition to LiveRepairReady
43432023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
43442023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active LiveRepairReady Active ds_transition to LiveRepair
43452023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
43462023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active LiveRepair Active ds_transition to Faulted
43472023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from LiveRepair to Faulted
43482023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active Faulted Active ds_transition to LiveRepairReady
43492023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
43502023-09-22T23:15:07.743ZINFOcrucible: [1] 6909e08a-657e-411e-b860-976a08df7f3b (9af93bfb-1d45-4033-8c5d-51c86e95e775) Active LiveRepairReady Active ds_transition to LiveRepair
43512023-09-22T23:15:07.743ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
43522023-09-22T23:15:07.743ZWARNcrucible: Write to Extent 0:2:9 under repair
43532023-09-22T23:15:07.743ZWARNcrucible: Write 1:0:9 past extent under repair 0
43542023-09-22T23:15:07.743ZWARNcrucible: IO Write 1005 on eur 0 Added deps 1
43552023-09-22T23:15:07.743ZWARNcrucible: Create read repair deps for extent 2
43562023-09-22T23:15:07.743ZWARNcrucible: IO Read 1010 extent 0 added deps 2
4357 test live_repair::repair_test::test_spicy_live_repair ... ok
4358 test mend::test::reconcile_dirty_length_bad - should panic ... ok
43592023-09-22T23:15:07.744ZINFOcrucible: Extents 2 dirty
43602023-09-22T23:15:07.744ZINFOcrucible: First source client ID for extent 2 mrl = dirty
43612023-09-22T23:15:07.744ZINFOcrucible: extent:2 gens: 7 7 7 mrl = dirty
43622023-09-22T23:15:07.744ZINFOcrucible: extent:2 flush: 2 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
43632023-09-22T23:15:07.744ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = dirty
43642023-09-22T23:15:07.744ZINFOcrucible: find dest for source 2 for extent at index 2 mrl = dirty
43652023-09-22T23:15:07.744ZINFOcrucible: source 2, add dest 0 flush mrl = dirty
43662023-09-22T23:15:07.744ZINFOcrucible: source 2, add dest 1 flush mrl = dirty
4367 test mend::test::reconcile_dirty_mismatch_c0 ... ok
43682023-09-22T23:15:07.745ZINFOcrucible: Extents 2 dirty
43692023-09-22T23:15:07.745ZINFOcrucible: First source client ID for extent 2 mrl = dirty
43702023-09-22T23:15:07.745ZINFOcrucible: extent:2 gens: 7 7 7 mrl = dirty
43712023-09-22T23:15:07.745ZINFOcrucible: extent:2 flush: 2 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
43722023-09-22T23:15:07.745ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
43732023-09-22T23:15:07.745ZINFOcrucible: extent:2 dirty: false true false mrl = dirty
43742023-09-22T23:15:07.745ZINFOcrucible: find dest for source 1 for extent at index 2 mrl = dirty
43752023-09-22T23:15:07.745ZINFOcrucible: source 1, add dest 0 source flush mrl = dirty
43762023-09-22T23:15:07.745ZINFOcrucible: source 1, add dest 2 source flush mrl = dirty
4377 test mend::test::reconcile_dirty_mismatch_c1 ... ok
43782023-09-22T23:15:07.745ZINFOcrucible: Extents 1 dirty
43792023-09-22T23:15:07.746ZINFOcrucible: First source client ID for extent 1 mrl = dirty
43802023-09-22T23:15:07.746ZINFOcrucible: extent:1 gens: 8 8 7 mrl = dirty
43812023-09-22T23:15:07.746ZINFOcrucible: extent:1 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = dirty
43822023-09-22T23:15:07.746ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = dirty
43832023-09-22T23:15:07.746ZINFOcrucible: extent:1 dirty: false false true mrl = dirty
43842023-09-22T23:15:07.746ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = dirty
43852023-09-22T23:15:07.746ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = dirty
43862023-09-22T23:15:07.746ZINFOcrucible: source 0, add dest 2 gen mrl = dirty
4387 test mend::test::reconcile_dirty_mismatch_c2 ... ok
43882023-09-22T23:15:07.746ZINFOcrucible: Extents 0 dirty
43892023-09-22T23:15:07.746ZINFOcrucible: First source client ID for extent 0 mrl = dirty
43902023-09-22T23:15:07.746ZINFOcrucible: extent:0 gens: 9 9 9 mrl = dirty
43912023-09-22T23:15:07.746ZINFOcrucible: extent:0 flush: 2 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
43922023-09-22T23:15:07.746ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
43932023-09-22T23:15:07.746ZINFOcrucible: extent:0 dirty: true true true mrl = dirty
43942023-09-22T23:15:07.746ZINFOcrucible: find dest for source 0 for extent at index 0 mrl = dirty
43952023-09-22T23:15:07.746ZINFOcrucible: source 0, add dest 1 source flush mrl = dirty
43962023-09-22T23:15:07.746ZINFOcrucible: source 0, add dest 2 source flush mrl = dirty
43972023-09-22T23:15:07.746ZINFOcrucible: Extents 3 dirty
43982023-09-22T23:15:07.746ZINFOcrucible: First source client ID for extent 3 mrl = dirty
43992023-09-22T23:15:07.746ZINFOcrucible: extent:3 gens: 7 7 7 mrl = dirty
44002023-09-22T23:15:07.746ZINFOcrucible: extent:3 flush: 1 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
44012023-09-22T23:15:07.746ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
44022023-09-22T23:15:07.746ZINFOcrucible: extent:3 dirty: true true true mrl = dirty
44032023-09-22T23:15:07.746ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = dirty
44042023-09-22T23:15:07.746ZINFOcrucible: source 0, add dest 1 source flush mrl = dirty
44052023-09-22T23:15:07.746ZINFOcrucible: source 0, add dest 2 source flush mrl = dirty
4406 ef.dest [
4407 ClientId(
4408 1,
4409 ),
4410 ClientId(
4411 2,
4412 ),
4413 ]
4414 test mend::test::reconcile_dirty_true ... ok
44152023-09-22T23:15:07.747ZINFOcrucible: Extent 1 has flush number mismatch
44162023-09-22T23:15:07.747ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
44172023-09-22T23:15:07.747ZINFOcrucible: extent:1 gens: 1 1 1 mrl = flush_mismatch
44182023-09-22T23:15:07.747ZINFOcrucible: extent:1 flush: 2 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44192023-09-22T23:15:07.747ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
44202023-09-22T23:15:07.747ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = flush_mismatch
44212023-09-22T23:15:07.747ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
44222023-09-22T23:15:07.747ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
44232023-09-22T23:15:07.747ZINFOcrucible: Extent 2 has flush number mismatch
44242023-09-22T23:15:07.747ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
44252023-09-22T23:15:07.747ZINFOcrucible: extent:2 gens: 1 1 1 mrl = flush_mismatch
44262023-09-22T23:15:07.747ZINFOcrucible: extent:2 flush: 3 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44272023-09-22T23:15:07.747ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
44282023-09-22T23:15:07.747ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
44292023-09-22T23:15:07.747ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
44302023-09-22T23:15:07.747ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
44312023-09-22T23:15:07.747ZINFOcrucible: Extent 3 has flush number mismatch
44322023-09-22T23:15:07.747ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
44332023-09-22T23:15:07.747ZINFOcrucible: extent:3 gens: 1 1 1 mrl = flush_mismatch
44342023-09-22T23:15:07.747ZINFOcrucible: extent:3 flush: 1 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44352023-09-22T23:15:07.747ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
44362023-09-22T23:15:07.747ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = flush_mismatch
44372023-09-22T23:15:07.747ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
44382023-09-22T23:15:07.747ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
44392023-09-22T23:15:07.747ZINFOcrucible: Extent 4 has flush number mismatch
44402023-09-22T23:15:07.747ZINFOcrucible: First source client ID for extent 4 mrl = flush_mismatch
44412023-09-22T23:15:07.748ZINFOcrucible: extent:4 gens: 1 1 1 mrl = flush_mismatch
44422023-09-22T23:15:07.748ZINFOcrucible: extent:4 flush: 2 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44432023-09-22T23:15:07.748ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = flush_mismatch
44442023-09-22T23:15:07.748ZINFOcrucible: extent:4 dirty: false false false mrl = flush_mismatch
44452023-09-22T23:15:07.748ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = flush_mismatch
44462023-09-22T23:15:07.748ZINFOcrucible: find dest for source 0 for extent at index 4 mrl = flush_mismatch
44472023-09-22T23:15:07.748ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
44482023-09-22T23:15:07.748ZINFOcrucible: Extent 5 has flush number mismatch
44492023-09-22T23:15:07.748ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
44502023-09-22T23:15:07.748ZINFOcrucible: extent:5 gens: 1 1 1 mrl = flush_mismatch
44512023-09-22T23:15:07.748ZINFOcrucible: extent:5 flush: 3 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44522023-09-22T23:15:07.748ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
44532023-09-22T23:15:07.748ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = flush_mismatch
44542023-09-22T23:15:07.748ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
44552023-09-22T23:15:07.748ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
44562023-09-22T23:15:07.748ZINFOcrucible: Extent 6 has flush number mismatch
44572023-09-22T23:15:07.748ZINFOcrucible: First source client ID for extent 6 mrl = flush_mismatch
44582023-09-22T23:15:07.748ZINFOcrucible: extent:6 gens: 1 1 1 mrl = flush_mismatch
44592023-09-22T23:15:07.748ZINFOcrucible: extent:6 flush: 1 3 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44602023-09-22T23:15:07.748ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
44612023-09-22T23:15:07.748ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = flush_mismatch
44622023-09-22T23:15:07.748ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
44632023-09-22T23:15:07.748ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
44642023-09-22T23:15:07.748ZINFOcrucible: Extent 7 has flush number mismatch
44652023-09-22T23:15:07.748ZINFOcrucible: First source client ID for extent 7 mrl = flush_mismatch
44662023-09-22T23:15:07.748ZINFOcrucible: extent:7 gens: 1 1 1 mrl = flush_mismatch
44672023-09-22T23:15:07.748ZINFOcrucible: extent:7 flush: 2 3 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44682023-09-22T23:15:07.748ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
44692023-09-22T23:15:07.748ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = flush_mismatch
44702023-09-22T23:15:07.748ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
44712023-09-22T23:15:07.748ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
44722023-09-22T23:15:07.748ZINFOcrucible: Extent 8 has flush number mismatch
44732023-09-22T23:15:07.748ZINFOcrucible: First source client ID for extent 8 mrl = flush_mismatch
44742023-09-22T23:15:07.748ZINFOcrucible: extent:8 gens: 1 1 1 mrl = flush_mismatch
44752023-09-22T23:15:07.748ZINFOcrucible: extent:8 flush: 3 3 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44762023-09-22T23:15:07.748ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = flush_mismatch
44772023-09-22T23:15:07.748ZINFOcrucible: extent:8 dirty: false false false mrl = flush_mismatch
44782023-09-22T23:15:07.748ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = flush_mismatch
44792023-09-22T23:15:07.748ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = flush_mismatch
44802023-09-22T23:15:07.748ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
4481 test mend::test::reconcile_flush_a ... ok
44822023-09-22T23:15:07.749ZINFOcrucible: Extent 0 has flush number mismatch
44832023-09-22T23:15:07.749ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
44842023-09-22T23:15:07.749ZINFOcrucible: extent:0 gens: 1 1 1 mrl = flush_mismatch
44852023-09-22T23:15:07.749ZINFOcrucible: extent:0 flush: 1 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44862023-09-22T23:15:07.749ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44872023-09-22T23:15:07.749ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
44882023-09-22T23:15:07.749ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44892023-09-22T23:15:07.749ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
44902023-09-22T23:15:07.749ZINFOcrucible: Extent 1 has flush number mismatch
44912023-09-22T23:15:07.749ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
44922023-09-22T23:15:07.749ZINFOcrucible: extent:1 gens: 1 1 1 mrl = flush_mismatch
44932023-09-22T23:15:07.749ZINFOcrucible: extent:1 flush: 2 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44942023-09-22T23:15:07.749ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
44952023-09-22T23:15:07.749ZINFOcrucible: extent:1 dirty: false false false mrl = flush_mismatch
44962023-09-22T23:15:07.749ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
44972023-09-22T23:15:07.749ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = flush_mismatch
44982023-09-22T23:15:07.749ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
44992023-09-22T23:15:07.749ZINFOcrucible: Extent 2 has flush number mismatch
45002023-09-22T23:15:07.749ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
45012023-09-22T23:15:07.749ZINFOcrucible: extent:2 gens: 1 1 1 mrl = flush_mismatch
45022023-09-22T23:15:07.749ZINFOcrucible: extent:2 flush: 3 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45032023-09-22T23:15:07.749ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
45042023-09-22T23:15:07.749ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
45052023-09-22T23:15:07.749ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
45062023-09-22T23:15:07.749ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
45072023-09-22T23:15:07.749ZINFOcrucible: Extent 3 has flush number mismatch
45082023-09-22T23:15:07.749ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
45092023-09-22T23:15:07.749ZINFOcrucible: extent:3 gens: 1 1 1 mrl = flush_mismatch
45102023-09-22T23:15:07.749ZINFOcrucible: extent:3 flush: 1 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45112023-09-22T23:15:07.749ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
45122023-09-22T23:15:07.749ZINFOcrucible: extent:3 dirty: false false false mrl = flush_mismatch
45132023-09-22T23:15:07.749ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
45142023-09-22T23:15:07.749ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = flush_mismatch
45152023-09-22T23:15:07.749ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
45162023-09-22T23:15:07.749ZINFOcrucible: Extent 5 has flush number mismatch
45172023-09-22T23:15:07.749ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
45182023-09-22T23:15:07.750ZINFOcrucible: extent:5 gens: 1 1 1 mrl = flush_mismatch
45192023-09-22T23:15:07.750ZINFOcrucible: extent:5 flush: 3 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45202023-09-22T23:15:07.750ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
45212023-09-22T23:15:07.750ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = flush_mismatch
45222023-09-22T23:15:07.750ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
45232023-09-22T23:15:07.750ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
45242023-09-22T23:15:07.750ZINFOcrucible: Extent 6 has flush number mismatch
45252023-09-22T23:15:07.750ZINFOcrucible: First source client ID for extent 6 mrl = flush_mismatch
45262023-09-22T23:15:07.750ZINFOcrucible: extent:6 gens: 1 1 1 mrl = flush_mismatch
45272023-09-22T23:15:07.750ZINFOcrucible: extent:6 flush: 1 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45282023-09-22T23:15:07.750ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
45292023-09-22T23:15:07.750ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = flush_mismatch
45302023-09-22T23:15:07.750ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
45312023-09-22T23:15:07.750ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
45322023-09-22T23:15:07.750ZINFOcrucible: Extent 7 has flush number mismatch
45332023-09-22T23:15:07.750ZINFOcrucible: First source client ID for extent 7 mrl = flush_mismatch
45342023-09-22T23:15:07.750ZINFOcrucible: extent:7 gens: 1 1 1 mrl = flush_mismatch
45352023-09-22T23:15:07.750ZINFOcrucible: extent:7 flush: 2 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45362023-09-22T23:15:07.750ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
45372023-09-22T23:15:07.750ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = flush_mismatch
45382023-09-22T23:15:07.750ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
45392023-09-22T23:15:07.750ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
45402023-09-22T23:15:07.750ZINFOcrucible: Extent 8 has flush number mismatch
45412023-09-22T23:15:07.750ZINFOcrucible: First source client ID for extent 8 mrl = flush_mismatch
45422023-09-22T23:15:07.750ZINFOcrucible: extent:8 gens: 1 1 1 mrl = flush_mismatch
45432023-09-22T23:15:07.750ZINFOcrucible: extent:8 flush: 3 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45442023-09-22T23:15:07.750ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = flush_mismatch
45452023-09-22T23:15:07.750ZINFOcrucible: extent:8 dirty: false false false mrl = flush_mismatch
45462023-09-22T23:15:07.750ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = flush_mismatch
45472023-09-22T23:15:07.750ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = flush_mismatch
45482023-09-22T23:15:07.750ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
4549 test mend::test::reconcile_flush_b ... ok
45502023-09-22T23:15:07.751ZINFOcrucible: Extent 0 has flush number mismatch
45512023-09-22T23:15:07.751ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
45522023-09-22T23:15:07.751ZINFOcrucible: extent:0 gens: 1 1 1 mrl = flush_mismatch
45532023-09-22T23:15:07.751ZINFOcrucible: extent:0 flush: 1 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45542023-09-22T23:15:07.751ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
45552023-09-22T23:15:07.751ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
45562023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
45572023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
45582023-09-22T23:15:07.751ZINFOcrucible: Extent 1 has flush number mismatch
45592023-09-22T23:15:07.751ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
45602023-09-22T23:15:07.751ZINFOcrucible: extent:1 gens: 1 1 1 mrl = flush_mismatch
45612023-09-22T23:15:07.751ZINFOcrucible: extent:1 flush: 2 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45622023-09-22T23:15:07.751ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
45632023-09-22T23:15:07.751ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = flush_mismatch
45642023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
45652023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
45662023-09-22T23:15:07.751ZINFOcrucible: Extent 2 has flush number mismatch
45672023-09-22T23:15:07.751ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
45682023-09-22T23:15:07.751ZINFOcrucible: extent:2 gens: 1 1 1 mrl = flush_mismatch
45692023-09-22T23:15:07.751ZINFOcrucible: extent:2 flush: 3 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45702023-09-22T23:15:07.751ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
45712023-09-22T23:15:07.751ZINFOcrucible: extent:2 dirty: false false false mrl = flush_mismatch
45722023-09-22T23:15:07.751ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
45732023-09-22T23:15:07.751ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
45742023-09-22T23:15:07.751ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
45752023-09-22T23:15:07.751ZINFOcrucible: Extent 3 has flush number mismatch
45762023-09-22T23:15:07.751ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
45772023-09-22T23:15:07.751ZINFOcrucible: extent:3 gens: 1 1 1 mrl = flush_mismatch
45782023-09-22T23:15:07.751ZINFOcrucible: extent:3 flush: 1 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45792023-09-22T23:15:07.751ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
45802023-09-22T23:15:07.751ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = flush_mismatch
45812023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
45822023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
45832023-09-22T23:15:07.751ZINFOcrucible: Extent 4 has flush number mismatch
45842023-09-22T23:15:07.751ZINFOcrucible: First source client ID for extent 4 mrl = flush_mismatch
45852023-09-22T23:15:07.751ZINFOcrucible: extent:4 gens: 1 1 1 mrl = flush_mismatch
45862023-09-22T23:15:07.751ZINFOcrucible: extent:4 flush: 2 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45872023-09-22T23:15:07.751ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
45882023-09-22T23:15:07.751ZINFOcrucible: find dest for source 2 for extent at index 4 mrl = flush_mismatch
45892023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
45902023-09-22T23:15:07.751ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
45912023-09-22T23:15:07.751ZINFOcrucible: Extent 5 has flush number mismatch
45922023-09-22T23:15:07.751ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
45932023-09-22T23:15:07.751ZINFOcrucible: extent:5 gens: 1 1 1 mrl = flush_mismatch
45942023-09-22T23:15:07.751ZINFOcrucible: extent:5 flush: 3 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45952023-09-22T23:15:07.752ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
45962023-09-22T23:15:07.752ZINFOcrucible: extent:5 dirty: false false false mrl = flush_mismatch
45972023-09-22T23:15:07.752ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
45982023-09-22T23:15:07.752ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = flush_mismatch
45992023-09-22T23:15:07.752ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
46002023-09-22T23:15:07.752ZINFOcrucible: Extent 6 has flush number mismatch
46012023-09-22T23:15:07.752ZINFOcrucible: First source client ID for extent 6 mrl = flush_mismatch
46022023-09-22T23:15:07.752ZINFOcrucible: extent:6 gens: 1 1 1 mrl = flush_mismatch
46032023-09-22T23:15:07.752ZINFOcrucible: extent:6 flush: 1 3 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46042023-09-22T23:15:07.752ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46052023-09-22T23:15:07.752ZINFOcrucible: extent:6 dirty: false false false mrl = flush_mismatch
46062023-09-22T23:15:07.752ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46072023-09-22T23:15:07.752ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = flush_mismatch
46082023-09-22T23:15:07.752ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
46092023-09-22T23:15:07.752ZINFOcrucible: Extent 7 has flush number mismatch
46102023-09-22T23:15:07.752ZINFOcrucible: First source client ID for extent 7 mrl = flush_mismatch
46112023-09-22T23:15:07.752ZINFOcrucible: extent:7 gens: 1 1 1 mrl = flush_mismatch
46122023-09-22T23:15:07.752ZINFOcrucible: extent:7 flush: 2 3 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46132023-09-22T23:15:07.752ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46142023-09-22T23:15:07.752ZINFOcrucible: extent:7 dirty: false false false mrl = flush_mismatch
46152023-09-22T23:15:07.752ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46162023-09-22T23:15:07.752ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = flush_mismatch
46172023-09-22T23:15:07.752ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
4618 test mend::test::reconcile_flush_c ... ok
4619 test mend::test::reconcile_flush_length_bad - should panic ... ok
46202023-09-22T23:15:07.753ZINFOcrucible: Extent 0 has flush number mismatch
46212023-09-22T23:15:07.753ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
46222023-09-22T23:15:07.753ZINFOcrucible: extent:0 gens: 9 9 9 mrl = flush_mismatch
46232023-09-22T23:15:07.753ZINFOcrucible: extent:0 flush: 1 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46242023-09-22T23:15:07.753ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46252023-09-22T23:15:07.753ZINFOcrucible: extent:0 dirty: false false false mrl = flush_mismatch
46262023-09-22T23:15:07.753ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46272023-09-22T23:15:07.753ZINFOcrucible: find dest for source 1 for extent at index 0 mrl = flush_mismatch
46282023-09-22T23:15:07.753ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
4629 test mend::test::reconcile_flush_mismatch_c0 ... ok
46302023-09-22T23:15:07.753ZINFOcrucible: Extent 0 has flush number mismatch
46312023-09-22T23:15:07.753ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
46322023-09-22T23:15:07.754ZINFOcrucible: extent:0 gens: 9 9 9 mrl = flush_mismatch
46332023-09-22T23:15:07.754ZINFOcrucible: extent:0 flush: 1 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46342023-09-22T23:15:07.754ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
46352023-09-22T23:15:07.754ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
46362023-09-22T23:15:07.754ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
46372023-09-22T23:15:07.754ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
46382023-09-22T23:15:07.754ZINFOcrucible: Extent 1 has flush number mismatch
46392023-09-22T23:15:07.754ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
46402023-09-22T23:15:07.754ZINFOcrucible: extent:1 gens: 8 8 8 mrl = flush_mismatch
46412023-09-22T23:15:07.754ZINFOcrucible: extent:1 flush: 2 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46422023-09-22T23:15:07.754ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
46432023-09-22T23:15:07.754ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = flush_mismatch
46442023-09-22T23:15:07.754ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
46452023-09-22T23:15:07.754ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
46462023-09-22T23:15:07.754ZINFOcrucible: Extent 2 has flush number mismatch
46472023-09-22T23:15:07.754ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
46482023-09-22T23:15:07.754ZINFOcrucible: extent:2 gens: 7 7 7 mrl = flush_mismatch
46492023-09-22T23:15:07.754ZINFOcrucible: extent:2 flush: 3 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46502023-09-22T23:15:07.754ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
46512023-09-22T23:15:07.754ZINFOcrucible: extent:2 dirty: false false false mrl = flush_mismatch
46522023-09-22T23:15:07.754ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
46532023-09-22T23:15:07.754ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
46542023-09-22T23:15:07.754ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
46552023-09-22T23:15:07.754ZINFOcrucible: Extent 3 has flush number mismatch
46562023-09-22T23:15:07.754ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
46572023-09-22T23:15:07.754ZINFOcrucible: extent:3 gens: 7 7 7 mrl = flush_mismatch
46582023-09-22T23:15:07.754ZINFOcrucible: extent:3 flush: 3 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46592023-09-22T23:15:07.754ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
46602023-09-22T23:15:07.754ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = flush_mismatch
46612023-09-22T23:15:07.754ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
46622023-09-22T23:15:07.754ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
46632023-09-22T23:15:07.754ZINFOcrucible: Extent 4 has flush number mismatch
46642023-09-22T23:15:07.754ZINFOcrucible: First source client ID for extent 4 mrl = flush_mismatch
46652023-09-22T23:15:07.754ZINFOcrucible: extent:4 gens: 6 6 6 mrl = flush_mismatch
46662023-09-22T23:15:07.754ZINFOcrucible: extent:4 flush: 1 3 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46672023-09-22T23:15:07.754ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46682023-09-22T23:15:07.754ZINFOcrucible: extent:4 dirty: false false false mrl = flush_mismatch
46692023-09-22T23:15:07.754ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
46702023-09-22T23:15:07.754ZINFOcrucible: find dest for source 1 for extent at index 4 mrl = flush_mismatch
46712023-09-22T23:15:07.754ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
46722023-09-22T23:15:07.754ZINFOcrucible: Extent 5 has flush number mismatch
46732023-09-22T23:15:07.754ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
46742023-09-22T23:15:07.754ZINFOcrucible: extent:5 gens: 5 5 5 mrl = flush_mismatch
46752023-09-22T23:15:07.754ZINFOcrucible: extent:5 flush: 2 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46762023-09-22T23:15:07.754ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
46772023-09-22T23:15:07.754ZINFOcrucible: find dest for source 1 for extent at index 5 mrl = flush_mismatch
46782023-09-22T23:15:07.754ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
46792023-09-22T23:15:07.754ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
4680 test mend::test::reconcile_flush_mismatch_c1 ... ok
46812023-09-22T23:15:07.755ZINFOcrucible: Extent 0 has flush number mismatch
46822023-09-22T23:15:07.755ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
46832023-09-22T23:15:07.755ZINFOcrucible: extent:0 gens: 9 9 9 mrl = flush_mismatch
46842023-09-22T23:15:07.755ZINFOcrucible: extent:0 flush: 1 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46852023-09-22T23:15:07.755ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
46862023-09-22T23:15:07.755ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
46872023-09-22T23:15:07.755ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
46882023-09-22T23:15:07.755ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
46892023-09-22T23:15:07.755ZINFOcrucible: Extent 3 has flush number mismatch
46902023-09-22T23:15:07.755ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
46912023-09-22T23:15:07.755ZINFOcrucible: extent:3 gens: 7 7 7 mrl = flush_mismatch
46922023-09-22T23:15:07.755ZINFOcrucible: extent:3 flush: 1 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
46932023-09-22T23:15:07.755ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
46942023-09-22T23:15:07.755ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = flush_mismatch
46952023-09-22T23:15:07.755ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
46962023-09-22T23:15:07.755ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
4697 test mend::test::reconcile_flush_mismatch_c2 ... ok
46982023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 1
46992023-09-22T23:15:07.756ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
47002023-09-22T23:15:07.756ZINFOcrucible: extent:1 gens: 2 1 1 mrl = gen_mismatch
47012023-09-22T23:15:07.756ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = gen_mismatch
47022023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47032023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
47042023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 2
47052023-09-22T23:15:07.756ZINFOcrucible: First source client ID for extent 2 mrl = gen_mismatch
47062023-09-22T23:15:07.756ZINFOcrucible: extent:2 gens: 3 1 1 mrl = gen_mismatch
47072023-09-22T23:15:07.756ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = gen_mismatch
47082023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47092023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
47102023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 3
47112023-09-22T23:15:07.756ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
47122023-09-22T23:15:07.756ZINFOcrucible: extent:3 gens: 1 2 1 mrl = gen_mismatch
47132023-09-22T23:15:07.756ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = gen_mismatch
47142023-09-22T23:15:07.756ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47152023-09-22T23:15:07.756ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
47162023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 4
47172023-09-22T23:15:07.756ZINFOcrucible: First source client ID for extent 4 mrl = gen_mismatch
47182023-09-22T23:15:07.756ZINFOcrucible: extent:4 gens: 2 2 1 mrl = gen_mismatch
47192023-09-22T23:15:07.756ZINFOcrucible: extent:4 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = gen_mismatch
47202023-09-22T23:15:07.756ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = gen_mismatch
47212023-09-22T23:15:07.756ZINFOcrucible: extent:4 dirty: false false false mrl = gen_mismatch
47222023-09-22T23:15:07.756ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = gen_mismatch
47232023-09-22T23:15:07.756ZINFOcrucible: find dest for source 0 for extent at index 4 mrl = gen_mismatch
47242023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
47252023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 5
47262023-09-22T23:15:07.756ZINFOcrucible: First source client ID for extent 5 mrl = gen_mismatch
47272023-09-22T23:15:07.756ZINFOcrucible: extent:5 gens: 3 2 1 mrl = gen_mismatch
47282023-09-22T23:15:07.756ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = gen_mismatch
47292023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47302023-09-22T23:15:07.756ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
47312023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 6
47322023-09-22T23:15:07.756ZINFOcrucible: First source client ID for extent 6 mrl = gen_mismatch
47332023-09-22T23:15:07.756ZINFOcrucible: extent:6 gens: 1 3 1 mrl = gen_mismatch
47342023-09-22T23:15:07.756ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = gen_mismatch
47352023-09-22T23:15:07.756ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47362023-09-22T23:15:07.756ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
47372023-09-22T23:15:07.756ZINFOcrucible: generation number mismatch 7
47382023-09-22T23:15:07.757ZINFOcrucible: First source client ID for extent 7 mrl = gen_mismatch
47392023-09-22T23:15:07.757ZINFOcrucible: extent:7 gens: 2 3 1 mrl = gen_mismatch
47402023-09-22T23:15:07.757ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = gen_mismatch
47412023-09-22T23:15:07.757ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47422023-09-22T23:15:07.757ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
47432023-09-22T23:15:07.757ZINFOcrucible: generation number mismatch 8
47442023-09-22T23:15:07.757ZINFOcrucible: First source client ID for extent 8 mrl = gen_mismatch
47452023-09-22T23:15:07.757ZINFOcrucible: extent:8 gens: 3 3 1 mrl = gen_mismatch
47462023-09-22T23:15:07.757ZINFOcrucible: extent:8 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = gen_mismatch
47472023-09-22T23:15:07.757ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = gen_mismatch
47482023-09-22T23:15:07.757ZINFOcrucible: extent:8 dirty: false false false mrl = gen_mismatch
47492023-09-22T23:15:07.757ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = gen_mismatch
47502023-09-22T23:15:07.757ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = gen_mismatch
47512023-09-22T23:15:07.757ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4752 test mend::test::reconcile_gen_a ... ok
47532023-09-22T23:15:07.757ZINFOcrucible: generation number mismatch 0
47542023-09-22T23:15:07.757ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
47552023-09-22T23:15:07.757ZINFOcrucible: extent:0 gens: 1 1 2 mrl = gen_mismatch
47562023-09-22T23:15:07.757ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = gen_mismatch
47572023-09-22T23:15:07.757ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
47582023-09-22T23:15:07.757ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
47592023-09-22T23:15:07.757ZINFOcrucible: generation number mismatch 1
47602023-09-22T23:15:07.757ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
47612023-09-22T23:15:07.757ZINFOcrucible: extent:1 gens: 2 1 2 mrl = gen_mismatch
47622023-09-22T23:15:07.757ZINFOcrucible: extent:1 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47632023-09-22T23:15:07.757ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47642023-09-22T23:15:07.757ZINFOcrucible: extent:1 dirty: false false false mrl = gen_mismatch
47652023-09-22T23:15:07.758ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47662023-09-22T23:15:07.758ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = gen_mismatch
47672023-09-22T23:15:07.758ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47682023-09-22T23:15:07.758ZINFOcrucible: generation number mismatch 2
47692023-09-22T23:15:07.758ZINFOcrucible: First source client ID for extent 2 mrl = gen_mismatch
47702023-09-22T23:15:07.758ZINFOcrucible: extent:2 gens: 3 1 2 mrl = gen_mismatch
47712023-09-22T23:15:07.758ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = gen_mismatch
47722023-09-22T23:15:07.758ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47732023-09-22T23:15:07.758ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
47742023-09-22T23:15:07.758ZINFOcrucible: generation number mismatch 3
47752023-09-22T23:15:07.758ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
47762023-09-22T23:15:07.758ZINFOcrucible: extent:3 gens: 1 2 2 mrl = gen_mismatch
47772023-09-22T23:15:07.758ZINFOcrucible: extent:3 flush: 1 1 1 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47782023-09-22T23:15:07.758ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47792023-09-22T23:15:07.758ZINFOcrucible: extent:3 dirty: false false false mrl = gen_mismatch
47802023-09-22T23:15:07.758ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47812023-09-22T23:15:07.758ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = gen_mismatch
47822023-09-22T23:15:07.758ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47832023-09-22T23:15:07.758ZINFOcrucible: generation number mismatch 5
47842023-09-22T23:15:07.758ZINFOcrucible: First source client ID for extent 5 mrl = gen_mismatch
47852023-09-22T23:15:07.758ZINFOcrucible: extent:5 gens: 3 2 2 mrl = gen_mismatch
47862023-09-22T23:15:07.758ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = gen_mismatch
47872023-09-22T23:15:07.758ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47882023-09-22T23:15:07.758ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
47892023-09-22T23:15:07.758ZINFOcrucible: generation number mismatch 6
47902023-09-22T23:15:07.758ZINFOcrucible: First source client ID for extent 6 mrl = gen_mismatch
47912023-09-22T23:15:07.758ZINFOcrucible: extent:6 gens: 1 3 2 mrl = gen_mismatch
47922023-09-22T23:15:07.758ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = gen_mismatch
47932023-09-22T23:15:07.758ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47942023-09-22T23:15:07.758ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
47952023-09-22T23:15:07.758ZINFOcrucible: generation number mismatch 7
47962023-09-22T23:15:07.758ZINFOcrucible: First source client ID for extent 7 mrl = gen_mismatch
47972023-09-22T23:15:07.758ZINFOcrucible: extent:7 gens: 2 3 2 mrl = gen_mismatch
47982023-09-22T23:15:07.758ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = gen_mismatch
47992023-09-22T23:15:07.758ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
48002023-09-22T23:15:07.758ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
48012023-09-22T23:15:07.758ZINFOcrucible: generation number mismatch 8
48022023-09-22T23:15:07.758ZINFOcrucible: First source client ID for extent 8 mrl = gen_mismatch
48032023-09-22T23:15:07.758ZINFOcrucible: extent:8 gens: 3 3 2 mrl = gen_mismatch
48042023-09-22T23:15:07.758ZINFOcrucible: extent:8 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = gen_mismatch
48052023-09-22T23:15:07.758ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = gen_mismatch
48062023-09-22T23:15:07.758ZINFOcrucible: extent:8 dirty: false false false mrl = gen_mismatch
48072023-09-22T23:15:07.758ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = gen_mismatch
48082023-09-22T23:15:07.758ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = gen_mismatch
48092023-09-22T23:15:07.758ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4810 test mend::test::reconcile_gen_b ... ok
48112023-09-22T23:15:07.759ZINFOcrucible: generation number mismatch 0
48122023-09-22T23:15:07.759ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
48132023-09-22T23:15:07.759ZINFOcrucible: extent:0 gens: 1 1 3 mrl = gen_mismatch
48142023-09-22T23:15:07.759ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = gen_mismatch
48152023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
48162023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
48172023-09-22T23:15:07.759ZINFOcrucible: generation number mismatch 1
48182023-09-22T23:15:07.759ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
48192023-09-22T23:15:07.759ZINFOcrucible: extent:1 gens: 2 1 3 mrl = gen_mismatch
48202023-09-22T23:15:07.759ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = gen_mismatch
48212023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
48222023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
48232023-09-22T23:15:07.759ZINFOcrucible: generation number mismatch 2
48242023-09-22T23:15:07.759ZINFOcrucible: First source client ID for extent 2 mrl = gen_mismatch
48252023-09-22T23:15:07.759ZINFOcrucible: extent:2 gens: 3 1 3 mrl = gen_mismatch
48262023-09-22T23:15:07.759ZINFOcrucible: extent:2 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48272023-09-22T23:15:07.759ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48282023-09-22T23:15:07.759ZINFOcrucible: extent:2 dirty: false false false mrl = gen_mismatch
48292023-09-22T23:15:07.759ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48302023-09-22T23:15:07.759ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = gen_mismatch
48312023-09-22T23:15:07.759ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
48322023-09-22T23:15:07.759ZINFOcrucible: generation number mismatch 3
48332023-09-22T23:15:07.759ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
48342023-09-22T23:15:07.759ZINFOcrucible: extent:3 gens: 1 2 3 mrl = gen_mismatch
48352023-09-22T23:15:07.759ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = gen_mismatch
48362023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
48372023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
48382023-09-22T23:15:07.759ZINFOcrucible: generation number mismatch 4
48392023-09-22T23:15:07.759ZINFOcrucible: First source client ID for extent 4 mrl = gen_mismatch
48402023-09-22T23:15:07.759ZINFOcrucible: extent:4 gens: 2 2 3 mrl = gen_mismatch
48412023-09-22T23:15:07.759ZINFOcrucible: find dest for source 2 for extent at index 4 mrl = gen_mismatch
48422023-09-22T23:15:07.759ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
48432023-09-22T23:15:07.760ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
48442023-09-22T23:15:07.760ZINFOcrucible: generation number mismatch 5
48452023-09-22T23:15:07.760ZINFOcrucible: First source client ID for extent 5 mrl = gen_mismatch
48462023-09-22T23:15:07.760ZINFOcrucible: extent:5 gens: 3 2 3 mrl = gen_mismatch
48472023-09-22T23:15:07.760ZINFOcrucible: extent:5 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48482023-09-22T23:15:07.760ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48492023-09-22T23:15:07.760ZINFOcrucible: extent:5 dirty: false false false mrl = gen_mismatch
48502023-09-22T23:15:07.760ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48512023-09-22T23:15:07.760ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = gen_mismatch
48522023-09-22T23:15:07.760ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
48532023-09-22T23:15:07.760ZINFOcrucible: generation number mismatch 6
48542023-09-22T23:15:07.760ZINFOcrucible: First source client ID for extent 6 mrl = gen_mismatch
48552023-09-22T23:15:07.760ZINFOcrucible: extent:6 gens: 1 3 3 mrl = gen_mismatch
48562023-09-22T23:15:07.760ZINFOcrucible: extent:6 flush: 1 1 1 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48572023-09-22T23:15:07.760ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48582023-09-22T23:15:07.760ZINFOcrucible: extent:6 dirty: false false false mrl = gen_mismatch
48592023-09-22T23:15:07.760ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48602023-09-22T23:15:07.760ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = gen_mismatch
48612023-09-22T23:15:07.760ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
48622023-09-22T23:15:07.760ZINFOcrucible: generation number mismatch 7
48632023-09-22T23:15:07.760ZINFOcrucible: First source client ID for extent 7 mrl = gen_mismatch
48642023-09-22T23:15:07.760ZINFOcrucible: extent:7 gens: 2 3 3 mrl = gen_mismatch
48652023-09-22T23:15:07.760ZINFOcrucible: extent:7 flush: 1 1 1 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48662023-09-22T23:15:07.760ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48672023-09-22T23:15:07.760ZINFOcrucible: extent:7 dirty: false false false mrl = gen_mismatch
48682023-09-22T23:15:07.760ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48692023-09-22T23:15:07.760ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = gen_mismatch
48702023-09-22T23:15:07.760ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
4871 test mend::test::reconcile_gen_c ... ok
4872 test mend::test::reconcile_gen_length_bad - should panic ... ok
48732023-09-22T23:15:07.761ZINFOcrucible: generation number mismatch 0
48742023-09-22T23:15:07.761ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
48752023-09-22T23:15:07.761ZINFOcrucible: extent:0 gens: 9 8 8 mrl = gen_mismatch
48762023-09-22T23:15:07.761ZINFOcrucible: find dest for source 0 for extent at index 0 mrl = gen_mismatch
48772023-09-22T23:15:07.761ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
48782023-09-22T23:15:07.761ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4879 test mend::test::reconcile_generation_mismatch_c0 ... ok
48802023-09-22T23:15:07.762ZINFOcrucible: generation number mismatch 0
48812023-09-22T23:15:07.762ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
48822023-09-22T23:15:07.762ZINFOcrucible: extent:0 gens: 9 8 9 mrl = gen_mismatch
48832023-09-22T23:15:07.762ZINFOcrucible: extent:0 flush: 2 2 2 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48842023-09-22T23:15:07.762ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48852023-09-22T23:15:07.762ZINFOcrucible: extent:0 dirty: false false false mrl = gen_mismatch
48862023-09-22T23:15:07.762ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48872023-09-22T23:15:07.762ZINFOcrucible: find dest for source 0 for extent at index 0 mrl = gen_mismatch
48882023-09-22T23:15:07.762ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
4889 my ef is: ExtentFix { source: ClientId(0), dest: [ClientId(1)] }
4890 test mend::test::reconcile_generation_mismatch_c1 ... ok
48912023-09-22T23:15:07.762ZINFOcrucible: generation number mismatch 0
48922023-09-22T23:15:07.762ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
48932023-09-22T23:15:07.762ZINFOcrucible: extent:0 gens: 7 8 8 mrl = gen_mismatch
48942023-09-22T23:15:07.762ZINFOcrucible: extent:0 flush: 2 2 2 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48952023-09-22T23:15:07.762ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48962023-09-22T23:15:07.762ZINFOcrucible: extent:0 dirty: false false false mrl = gen_mismatch
48972023-09-22T23:15:07.762ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
48982023-09-22T23:15:07.762ZINFOcrucible: find dest for source 1 for extent at index 0 mrl = gen_mismatch
48992023-09-22T23:15:07.762ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
49002023-09-22T23:15:07.762ZINFOcrucible: generation number mismatch 1
49012023-09-22T23:15:07.762ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
49022023-09-22T23:15:07.762ZINFOcrucible: extent:1 gens: 8 9 10 mrl = gen_mismatch
49032023-09-22T23:15:07.762ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = gen_mismatch
49042023-09-22T23:15:07.762ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
49052023-09-22T23:15:07.762ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
49062023-09-22T23:15:07.762ZINFOcrucible: generation number mismatch 3
49072023-09-22T23:15:07.763ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
49082023-09-22T23:15:07.763ZINFOcrucible: extent:3 gens: 5 4 3 mrl = gen_mismatch
49092023-09-22T23:15:07.763ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = gen_mismatch
49102023-09-22T23:15:07.763ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
49112023-09-22T23:15:07.763ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4912 test mend::test::reconcile_generation_mismatch_c2 ... ok
4913 test mend::test::reconcile_length_mismatch - should panic ... ok
49142023-09-22T23:15:07.763ZINFOcrucible: Extent 0 has flush number mismatch
49152023-09-22T23:15:07.764ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
49162023-09-22T23:15:07.764ZINFOcrucible: extent:0 gens: 9 9 8 mrl = flush_mismatch
49172023-09-22T23:15:07.764ZINFOcrucible: extent:0 flush: 1 2 3 scs: [ClientId(0), ClientId(1)] mrl = flush_mismatch
49182023-09-22T23:15:07.764ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
49192023-09-22T23:15:07.764ZINFOcrucible: find dest for source 1 for extent at index 0 mrl = flush_mismatch
49202023-09-22T23:15:07.764ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
49212023-09-22T23:15:07.764ZINFOcrucible: source 1, add dest 2 gen mrl = flush_mismatch
49222023-09-22T23:15:07.764ZINFOcrucible: Extent 1 has flush number mismatch
49232023-09-22T23:15:07.764ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
49242023-09-22T23:15:07.764ZINFOcrucible: extent:1 gens: 7 8 8 mrl = flush_mismatch
49252023-09-22T23:15:07.764ZINFOcrucible: extent:1 flush: 1 1 2 scs: [ClientId(1), ClientId(2)] mrl = flush_mismatch
49262023-09-22T23:15:07.764ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
49272023-09-22T23:15:07.764ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = flush_mismatch
49282023-09-22T23:15:07.764ZINFOcrucible: source 2, add dest 0 gen mrl = flush_mismatch
49292023-09-22T23:15:07.764ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
49302023-09-22T23:15:07.764ZINFOcrucible: Extent 2 has flush number mismatch
49312023-09-22T23:15:07.764ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
49322023-09-22T23:15:07.764ZINFOcrucible: extent:2 gens: 7 9 7 mrl = flush_mismatch
49332023-09-22T23:15:07.764ZINFOcrucible: find dest for source 1 for extent at index 2 mrl = flush_mismatch
49342023-09-22T23:15:07.764ZINFOcrucible: source 1, add dest 0 gen mrl = flush_mismatch
49352023-09-22T23:15:07.764ZINFOcrucible: source 1, add dest 2 gen mrl = flush_mismatch
49362023-09-22T23:15:07.764ZINFOcrucible: Extent 3 has flush number mismatch
49372023-09-22T23:15:07.764ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
49382023-09-22T23:15:07.764ZINFOcrucible: extent:3 gens: 7 8 9 mrl = flush_mismatch
49392023-09-22T23:15:07.764ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = flush_mismatch
49402023-09-22T23:15:07.764ZINFOcrucible: source 2, add dest 0 gen mrl = flush_mismatch
49412023-09-22T23:15:07.764ZINFOcrucible: source 2, add dest 1 gen mrl = flush_mismatch
4942 test mend::test::reconcile_multiple_source ... ok
4943 test mend::test::reconcile_one ... ok
49442023-09-22T23:15:07.765ZINFOcrucible: Extents 0 dirty
49452023-09-22T23:15:07.765ZINFOcrucible: First source client ID for extent 0 mrl = dirty
49462023-09-22T23:15:07.765ZINFOcrucible: extent:0 gens: 9 9 9 mrl = dirty
49472023-09-22T23:15:07.765ZINFOcrucible: extent:0 flush: 2 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
49482023-09-22T23:15:07.765ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = dirty
49492023-09-22T23:15:07.765ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = dirty
49502023-09-22T23:15:07.765ZINFOcrucible: source 2, add dest 0 flush mrl = dirty
49512023-09-22T23:15:07.765ZINFOcrucible: source 2, add dest 1 flush mrl = dirty
49522023-09-22T23:15:07.765ZINFOcrucible: Extents 2 dirty
49532023-09-22T23:15:07.765ZINFOcrucible: First source client ID for extent 2 mrl = dirty
49542023-09-22T23:15:07.765ZINFOcrucible: extent:2 gens: 7 7 8 mrl = dirty
49552023-09-22T23:15:07.765ZINFOcrucible: find dest for source 2 for extent at index 2 mrl = dirty
49562023-09-22T23:15:07.765ZINFOcrucible: source 2, add dest 0 gen mrl = dirty
49572023-09-22T23:15:07.765ZINFOcrucible: source 2, add dest 1 gen mrl = dirty
49582023-09-22T23:15:07.765ZINFOcrucible: Extents 3 dirty
49592023-09-22T23:15:07.765ZINFOcrucible: First source client ID for extent 3 mrl = dirty
49602023-09-22T23:15:07.765ZINFOcrucible: extent:3 gens: 7 7 7 mrl = dirty
49612023-09-22T23:15:07.765ZINFOcrucible: extent:3 flush: 1 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
49622023-09-22T23:15:07.765ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
49632023-09-22T23:15:07.765ZINFOcrucible: extent:3 dirty: true true true mrl = dirty
49642023-09-22T23:15:07.765ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = dirty
49652023-09-22T23:15:07.765ZINFOcrucible: source 0, add dest 1 source flush mrl = dirty
49662023-09-22T23:15:07.765ZINFOcrucible: source 0, add dest 2 source flush mrl = dirty
49672023-09-22T23:15:07.765ZINFOcrucible: generation number mismatch 1
49682023-09-22T23:15:07.765ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
49692023-09-22T23:15:07.765ZINFOcrucible: extent:1 gens: 8 7 8 mrl = gen_mismatch
49702023-09-22T23:15:07.765ZINFOcrucible: extent:1 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
49712023-09-22T23:15:07.765ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
49722023-09-22T23:15:07.765ZINFOcrucible: extent:1 dirty: false false false mrl = gen_mismatch
49732023-09-22T23:15:07.766ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
49742023-09-22T23:15:07.766ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = gen_mismatch
49752023-09-22T23:15:07.766ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
4976 test mend::test::reconcile_one_of_each ... ok
4977 test mend::test::reconcile_to_repair ... ok
49782023-09-22T23:15:07.767ZINFOcrucible: Crucible stats registered with UUID: 3fdb42c8-a904-4e37-9001-6acceac3fb6c
49792023-09-22T23:15:07.767ZINFOcrucible: Crucible 3fdb42c8-a904-4e37-9001-6acceac3fb6c has session id: ffdd31cb-e7a1-4a7e-81ba-a8273c006d89
49802023-09-22T23:15:07.767ZINFOcrucible: 3fdb42c8-a904-4e37-9001-6acceac3fb6c is now active with session: 6247eff3-a4fd-4994-9758-1b20f41c8bd7
49812023-09-22T23:15:07.767ZWARNcrucible: Decryption failed even though integrity hash matched! = downstairs
49822023-09-22T23:15:07.767ZERROcrucible: Decryption failed with correct hash = downstairs
49832023-09-22T23:15:07.767ZERROcrucible: [0] Reports error DecryptionError on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
4984 test test::up_test::bad_decryption_means_panic ... ok
49852023-09-22T23:15:07.768ZERROcrucible: No match for integrity hash = downstairs
49862023-09-22T23:15:07.768ZERROcrucible: Expected: 0x2710 != Computed: 0x5ce12454c95082b7 = downstairs
49872023-09-22T23:15:07.768ZERROcrucible: [0] Reports error HashMismatch on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
4988 test test::up_test::bad_hash_on_encrypted_read_panic ... ok
49892023-09-22T23:15:07.769ZINFOcrucible: Crucible stats registered with UUID: 4316b2ed-72a6-40cb-983c-23141001e682
49902023-09-22T23:15:07.769ZINFOcrucible: Crucible 4316b2ed-72a6-40cb-983c-23141001e682 has session id: 7b325238-1b67-4ebf-934d-f99ead858efa
49912023-09-22T23:15:07.769ZINFOcrucible: 4316b2ed-72a6-40cb-983c-23141001e682 is now active with session: a45ed002-b2b6-4cb3-bb2d-fc5a888bd623
49922023-09-22T23:15:07.769ZERROcrucible: No match computed hash:0x78fc2d7d9eaf9bbf = downstairs
49932023-09-22T23:15:07.769ZERROcrucible: No match hash:0x2710 = downstairs
49942023-09-22T23:15:07.769ZERROcrucible: Data from hash: = downstairs
49952023-09-22T23:15:07.769ZERROcrucible: [0]:1 = downstairs
49962023-09-22T23:15:07.769ZERROcrucible: [1]:1 = downstairs
49972023-09-22T23:15:07.769ZERROcrucible: [2]:1 = downstairs
49982023-09-22T23:15:07.769ZERROcrucible: [3]:1 = downstairs
49992023-09-22T23:15:07.769ZERROcrucible: [4]:1 = downstairs
50002023-09-22T23:15:07.769ZERROcrucible: [5]:1 = downstairs
50012023-09-22T23:15:07.769ZERROcrucible: [0] Reports error HashMismatch on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5002 test test::up_test::bad_read_hash_means_panic ... ok
50032023-09-22T23:15:07.770ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
50042023-09-22T23:15:07.770ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: d3548f3c-ca67-4037-ac1f-f010a1fdcf1c
50052023-09-22T23:15:07.770ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 6a72f79d-a3f9-43d9-8241-0377b7bbf6e6
50062023-09-22T23:15:07.770ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
50072023-09-22T23:15:07.770ZINFOcrucible: [0] check deactivate YES
50082023-09-22T23:15:07.770ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 (6a72f79d-a3f9-43d9-8241-0377b7bbf6e6) Active Active Active ds_transition to Deactivated
50092023-09-22T23:15:07.770ZINFOcrucible: [0] Transition from Active to Deactivated
50102023-09-22T23:15:07.770ZINFOcrucible: [2] check deactivate YES
50112023-09-22T23:15:07.770ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 (6a72f79d-a3f9-43d9-8241-0377b7bbf6e6) Deactivated Active Active ds_transition to Deactivated
50122023-09-22T23:15:07.770ZINFOcrucible: [2] Transition from Active to Deactivated
50132023-09-22T23:15:07.770ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
50142023-09-22T23:15:07.770ZINFOcrucible: deactivate transition checking...
50152023-09-22T23:15:07.770ZINFOcrucible: deactivate_transition Deactivated NO
50162023-09-22T23:15:07.770ZINFOcrucible: deactivate_transition Active NO
50172023-09-22T23:15:07.770ZINFOcrucible: deactivate_transition Deactivated NO
50182023-09-22T23:15:07.770ZINFOcrucible: [1] check deactivate YES
50192023-09-22T23:15:07.770ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 (6a72f79d-a3f9-43d9-8241-0377b7bbf6e6) Deactivated Active Deactivated ds_transition to Deactivated
50202023-09-22T23:15:07.770ZINFOcrucible: [1] Transition from Active to Deactivated
50212023-09-22T23:15:07.770ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
50222023-09-22T23:15:07.770ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
50232023-09-22T23:15:07.770ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
50242023-09-22T23:15:07.770ZINFOcrucible: deactivate transition checking...
50252023-09-22T23:15:07.770ZINFOcrucible: deactivate_transition New Maybe
50262023-09-22T23:15:07.770ZINFOcrucible: deactivate_transition New Maybe
50272023-09-22T23:15:07.770ZINFOcrucible: deactivate_transition New Maybe
50282023-09-22T23:15:07.770ZINFOcrucible: All DS in the proper state! -> INIT
5029 test test::up_test::deactivate_after_work_completed_write ... ok
50302023-09-22T23:15:07.771ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
50312023-09-22T23:15:07.771ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 703b59ba-34c1-4615-a19f-cb18d21c8d79
50322023-09-22T23:15:07.771ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: f41142b9-96b6-4b78-b743-60791b147d5c
50332023-09-22T23:15:07.771ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
50342023-09-22T23:15:07.771ZINFOcrucible: [0] check deactivate YES
50352023-09-22T23:15:07.771ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 (f41142b9-96b6-4b78-b743-60791b147d5c) Active Active Active ds_transition to Deactivated
50362023-09-22T23:15:07.771ZINFOcrucible: [0] Transition from Active to Deactivated
50372023-09-22T23:15:07.771ZINFOcrucible: [2] check deactivate YES
50382023-09-22T23:15:07.771ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 (f41142b9-96b6-4b78-b743-60791b147d5c) Deactivated Active Active ds_transition to Deactivated
50392023-09-22T23:15:07.771ZINFOcrucible: [2] Transition from Active to Deactivated
50402023-09-22T23:15:07.771ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
50412023-09-22T23:15:07.771ZINFOcrucible: deactivate transition checking...
50422023-09-22T23:15:07.771ZINFOcrucible: deactivate_transition Deactivated NO
50432023-09-22T23:15:07.771ZINFOcrucible: deactivate_transition Active NO
50442023-09-22T23:15:07.771ZINFOcrucible: deactivate_transition Deactivated NO
50452023-09-22T23:15:07.771ZINFOcrucible: [1] check deactivate YES
50462023-09-22T23:15:07.771ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 (f41142b9-96b6-4b78-b743-60791b147d5c) Deactivated Active Deactivated ds_transition to Deactivated
50472023-09-22T23:15:07.771ZINFOcrucible: [1] Transition from Active to Deactivated
50482023-09-22T23:15:07.771ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
50492023-09-22T23:15:07.771ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
50502023-09-22T23:15:07.771ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
50512023-09-22T23:15:07.771ZINFOcrucible: deactivate transition checking...
50522023-09-22T23:15:07.771ZINFOcrucible: deactivate_transition New Maybe
50532023-09-22T23:15:07.771ZINFOcrucible: deactivate_transition New Maybe
50542023-09-22T23:15:07.772ZINFOcrucible: deactivate_transition New Maybe
50552023-09-22T23:15:07.772ZINFOcrucible: All DS in the proper state! -> INIT
5056 test test::up_test::deactivate_after_work_completed_write_unwritten ... ok
50572023-09-22T23:15:07.772ZINFOcrucible: Crucible stats registered with UUID: 59ffeb50-bbfb-468f-9838-3e246cb7bda2
50582023-09-22T23:15:07.772ZINFOcrucible: Crucible 59ffeb50-bbfb-468f-9838-3e246cb7bda2 has session id: 1a8f88ad-9dba-4528-a293-b04c5c6577b0
50592023-09-22T23:15:07.772ZINFOcrucible: 59ffeb50-bbfb-468f-9838-3e246cb7bda2 is now active with session: b07b796c-5eb3-4144-9382-1c890b42ec03
5060 test test::up_test::deactivate_ds_not_when_active ... ok
50612023-09-22T23:15:07.773ZINFOcrucible: Crucible stats registered with UUID: 2d69d5fd-8689-43e8-959d-eb511f22b975
50622023-09-22T23:15:07.773ZINFOcrucible: Crucible 2d69d5fd-8689-43e8-959d-eb511f22b975 has session id: becd7e65-ee77-4283-b44b-1dba6911cf62
5063 test test::up_test::deactivate_ds_not_when_initializing ... ok
50642023-09-22T23:15:07.773ZINFOcrucible: Crucible stats registered with UUID: c9a38720-6349-48b1-9c95-f354263b69a6
50652023-09-22T23:15:07.773ZINFOcrucible: Crucible c9a38720-6349-48b1-9c95-f354263b69a6 has session id: 7160363e-a509-43fd-8141-6136100bdc5d
50662023-09-22T23:15:07.773ZINFOcrucible: c9a38720-6349-48b1-9c95-f354263b69a6 is now active with session: 5e0e2aa8-bd7e-461a-9cd7-dfaae181db17
50672023-09-22T23:15:07.773ZINFOcrucible: c9a38720-6349-48b1-9c95-f354263b69a6 set deactivating.
5068 test test::up_test::deactivate_not_when_active ... ok
50692023-09-22T23:15:07.774ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
50702023-09-22T23:15:07.774ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: fe363f11-515b-4fe1-aaa4-b9af95aefa5f
50712023-09-22T23:15:07.774ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 2c4c5856-415d-4e76-8f68-945ddb52c496
50722023-09-22T23:15:07.774ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
50732023-09-22T23:15:07.774ZINFOcrucible: [0] deactivate job 1001 not New flush, NO
50742023-09-22T23:15:07.774ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
50752023-09-22T23:15:07.774ZINFOcrucible: [2] deactivate job 1001 not New flush, NO
50762023-09-22T23:15:07.774ZINFOcrucible: deactivate transition checking...
50772023-09-22T23:15:07.774ZINFOcrucible: deactivate_transition Active NO
50782023-09-22T23:15:07.774ZINFOcrucible: deactivate_transition Active NO
50792023-09-22T23:15:07.774ZINFOcrucible: deactivate_transition Active NO
5080 test test::up_test::deactivate_not_without_flush_write ... ok
50812023-09-22T23:15:07.775ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
50822023-09-22T23:15:07.775ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 8a054696-825c-412b-9fb8-8a34ca03e973
50832023-09-22T23:15:07.775ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: de2da2cc-9019-4ae0-bcd5-7491dd6434b3
50842023-09-22T23:15:07.775ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
50852023-09-22T23:15:07.775ZINFOcrucible: [0] deactivate job 1001 not New flush, NO
50862023-09-22T23:15:07.775ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
50872023-09-22T23:15:07.775ZINFOcrucible: [2] deactivate job 1001 not New flush, NO
50882023-09-22T23:15:07.775ZINFOcrucible: deactivate transition checking...
50892023-09-22T23:15:07.775ZINFOcrucible: deactivate_transition Active NO
50902023-09-22T23:15:07.775ZINFOcrucible: deactivate_transition Active NO
50912023-09-22T23:15:07.775ZINFOcrucible: deactivate_transition Active NO
5092 test test::up_test::deactivate_not_without_flush_write_unwritten ... ok
50932023-09-22T23:15:07.776ZINFOcrucible: Crucible stats registered with UUID: ff83e226-dd3f-4e44-be84-d52efdd6fcf2
50942023-09-22T23:15:07.776ZINFOcrucible: Crucible ff83e226-dd3f-4e44-be84-d52efdd6fcf2 has session id: da861e5a-8204-44d7-98bc-bf94c06e4dc2
50952023-09-22T23:15:07.776ZINFOcrucible: ff83e226-dd3f-4e44-be84-d52efdd6fcf2 is now active with session: 3352050a-3e4c-4702-b20b-ff40606866a8
50962023-09-22T23:15:07.776ZINFOcrucible: ff83e226-dd3f-4e44-be84-d52efdd6fcf2 set deactivating.
50972023-09-22T23:15:07.776ZINFOcrucible: [0] deactivate, no work so YES
50982023-09-22T23:15:07.776ZINFOcrucible: [0] ff83e226-dd3f-4e44-be84-d52efdd6fcf2 (3352050a-3e4c-4702-b20b-ff40606866a8) Active Active Active ds_transition to Deactivated
50992023-09-22T23:15:07.776ZINFOcrucible: [0] Transition from Active to Deactivated
51002023-09-22T23:15:07.776ZINFOcrucible: [1] deactivate, no work so YES
51012023-09-22T23:15:07.776ZINFOcrucible: [1] ff83e226-dd3f-4e44-be84-d52efdd6fcf2 (3352050a-3e4c-4702-b20b-ff40606866a8) Deactivated Active Active ds_transition to Deactivated
51022023-09-22T23:15:07.776ZINFOcrucible: [1] Transition from Active to Deactivated
51032023-09-22T23:15:07.776ZINFOcrucible: [2] deactivate, no work so YES
51042023-09-22T23:15:07.776ZINFOcrucible: [2] ff83e226-dd3f-4e44-be84-d52efdd6fcf2 (3352050a-3e4c-4702-b20b-ff40606866a8) Deactivated Deactivated Active ds_transition to Deactivated
51052023-09-22T23:15:07.776ZINFOcrucible: [2] Transition from Active to Deactivated
51062023-09-22T23:15:07.776ZINFOcrucible: [0] ff83e226-dd3f-4e44-be84-d52efdd6fcf2 Gone missing, transition from Deactivated to New
51072023-09-22T23:15:07.776ZINFOcrucible: [1] ff83e226-dd3f-4e44-be84-d52efdd6fcf2 Gone missing, transition from Deactivated to New
51082023-09-22T23:15:07.776ZINFOcrucible: [2] ff83e226-dd3f-4e44-be84-d52efdd6fcf2 Gone missing, transition from Deactivated to New
51092023-09-22T23:15:07.776ZINFOcrucible: deactivate transition checking...
51102023-09-22T23:15:07.776ZINFOcrucible: deactivate_transition New Maybe
51112023-09-22T23:15:07.776ZINFOcrucible: deactivate_transition New Maybe
51122023-09-22T23:15:07.776ZINFOcrucible: deactivate_transition New Maybe
51132023-09-22T23:15:07.776ZINFOcrucible: All DS in the proper state! -> INIT
5114 test test::up_test::deactivate_when_empty ... ok
51152023-09-22T23:15:07.777ZINFOcrucible: Crucible stats registered with UUID: 0dc9bb7c-f00c-46b4-b449-b29dea3c6f4b
51162023-09-22T23:15:07.777ZINFOcrucible: Crucible 0dc9bb7c-f00c-46b4-b449-b29dea3c6f4b has session id: 3d1e08b5-373b-41f1-8d6f-ec024985610c
51172023-09-22T23:15:07.777ZINFOcrucible: [0] 0dc9bb7c-f00c-46b4-b449-b29dea3c6f4b (c37f3bec-904d-4191-822c-88c2c81b84df) New New New ds_transition to WaitQuorum
5118 test test::up_test::downstairs_bad_transition_wq - should panic ... ok
51192023-09-22T23:15:07.777ZINFOcrucible: Crucible stats registered with UUID: ead6ac30-3b89-489b-bdff-eba60a2fafb4
51202023-09-22T23:15:07.777ZINFOcrucible: Crucible ead6ac30-3b89-489b-bdff-eba60a2fafb4 has session id: d7727c15-3078-4863-9753-dfd616730ec7
51212023-09-22T23:15:07.777ZINFOcrucible: [0] ead6ac30-3b89-489b-bdff-eba60a2fafb4 (1dcc2031-1f3f-4663-b1df-92a89ad8f278) New New New ds_transition to WaitActive
51222023-09-22T23:15:07.777ZINFOcrucible: [0] Transition from New to WaitActive
51232023-09-22T23:15:07.777ZINFOcrucible: [0] ead6ac30-3b89-489b-bdff-eba60a2fafb4 (1dcc2031-1f3f-4663-b1df-92a89ad8f278) WaitActive New New ds_transition to WaitQuorum
51242023-09-22T23:15:07.777ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51252023-09-22T23:15:07.778ZINFOcrucible: [0] ead6ac30-3b89-489b-bdff-eba60a2fafb4 (1dcc2031-1f3f-4663-b1df-92a89ad8f278) WaitQuorum New New ds_transition to Active
51262023-09-22T23:15:07.778ZINFOcrucible: [0] Transition from WaitQuorum to Active
51272023-09-22T23:15:07.778ZINFOcrucible: [0] ead6ac30-3b89-489b-bdff-eba60a2fafb4 (1dcc2031-1f3f-4663-b1df-92a89ad8f278) Active New New ds_transition to Faulted
51282023-09-22T23:15:07.778ZINFOcrucible: [0] Transition from Active to Faulted
5129 test test::up_test::downstairs_transition_active_faulted ... ok
51302023-09-22T23:15:07.778ZINFOcrucible: Crucible stats registered with UUID: ce47081e-609d-4bd9-96e0-3e9627a225cb
51312023-09-22T23:15:07.778ZINFOcrucible: Crucible ce47081e-609d-4bd9-96e0-3e9627a225cb has session id: 84acc2f5-2a0a-48f4-a13b-902ed5bcbfe7
51322023-09-22T23:15:07.778ZINFOcrucible: [0] ce47081e-609d-4bd9-96e0-3e9627a225cb (832eddc2-7471-4a10-b7e5-35c0c66d0269) New New New ds_transition to WaitActive
51332023-09-22T23:15:07.778ZINFOcrucible: [0] Transition from New to WaitActive
51342023-09-22T23:15:07.778ZINFOcrucible: [0] ce47081e-609d-4bd9-96e0-3e9627a225cb (832eddc2-7471-4a10-b7e5-35c0c66d0269) WaitActive New New ds_transition to WaitQuorum
51352023-09-22T23:15:07.778ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51362023-09-22T23:15:07.778ZINFOcrucible: [0] ce47081e-609d-4bd9-96e0-3e9627a225cb (832eddc2-7471-4a10-b7e5-35c0c66d0269) WaitQuorum New New ds_transition to Active
51372023-09-22T23:15:07.778ZINFOcrucible: [0] Transition from WaitQuorum to Active
51382023-09-22T23:15:07.778ZINFOcrucible: [0] ce47081e-609d-4bd9-96e0-3e9627a225cb (832eddc2-7471-4a10-b7e5-35c0c66d0269) Active New New ds_transition to Faulted
51392023-09-22T23:15:07.778ZINFOcrucible: [0] Transition from Active to Faulted
5140 test test::up_test::downstairs_transition_active_to_faulted ... ok
51412023-09-22T23:15:07.779ZINFOcrucible: Crucible stats registered with UUID: d2b7cfe6-1c6e-4500-9d87-c67ccf233a2d
51422023-09-22T23:15:07.779ZINFOcrucible: Crucible d2b7cfe6-1c6e-4500-9d87-c67ccf233a2d has session id: 473f5a84-3310-446b-8b96-2409bce79bc6
51432023-09-22T23:15:07.779ZINFOcrucible: [0] d2b7cfe6-1c6e-4500-9d87-c67ccf233a2d (23118f30-e980-4710-8f76-049fc3c3b60e) New New New ds_transition to WaitActive
51442023-09-22T23:15:07.779ZINFOcrucible: [0] Transition from New to WaitActive
51452023-09-22T23:15:07.779ZINFOcrucible: [0] d2b7cfe6-1c6e-4500-9d87-c67ccf233a2d (23118f30-e980-4710-8f76-049fc3c3b60e) WaitActive New New ds_transition to WaitQuorum
51462023-09-22T23:15:07.779ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51472023-09-22T23:15:07.779ZINFOcrucible: [0] d2b7cfe6-1c6e-4500-9d87-c67ccf233a2d (23118f30-e980-4710-8f76-049fc3c3b60e) WaitQuorum New New ds_transition to WaitActive
5148 test test::up_test::downstairs_transition_backwards - should panic ... ok
51492023-09-22T23:15:07.780ZINFOcrucible: Crucible stats registered with UUID: 36f25b90-c77d-4d14-88f7-e35a97e003ec
51502023-09-22T23:15:07.780ZINFOcrucible: Crucible 36f25b90-c77d-4d14-88f7-e35a97e003ec has session id: dc40eb20-2983-4df1-82c4-e5685c017c3d
51512023-09-22T23:15:07.780ZINFOcrucible: [0] 36f25b90-c77d-4d14-88f7-e35a97e003ec (efe31aa6-7ba2-4b21-8776-d62aecd34153) New New New ds_transition to WaitActive
51522023-09-22T23:15:07.780ZINFOcrucible: [0] Transition from New to WaitActive
51532023-09-22T23:15:07.780ZINFOcrucible: [0] 36f25b90-c77d-4d14-88f7-e35a97e003ec (efe31aa6-7ba2-4b21-8776-d62aecd34153) WaitActive New New ds_transition to WaitQuorum
51542023-09-22T23:15:07.780ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51552023-09-22T23:15:07.780ZINFOcrucible: [0] 36f25b90-c77d-4d14-88f7-e35a97e003ec (efe31aa6-7ba2-4b21-8776-d62aecd34153) WaitQuorum New New ds_transition to Active
51562023-09-22T23:15:07.780ZINFOcrucible: [0] Transition from WaitQuorum to Active
51572023-09-22T23:15:07.780ZINFOcrucible: [0] 36f25b90-c77d-4d14-88f7-e35a97e003ec (efe31aa6-7ba2-4b21-8776-d62aecd34153) Active New New ds_transition to WaitQuorum
5158 test test::up_test::downstairs_transition_bad_active - should panic ... ok
51592023-09-22T23:15:07.780ZINFOcrucible: Crucible stats registered with UUID: 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11
51602023-09-22T23:15:07.780ZINFOcrucible: Crucible 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11 has session id: 143c0113-42bf-445f-be5c-16b5d0122915
51612023-09-22T23:15:07.780ZINFOcrucible: [0] 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11 (cc35b5ef-fbcb-41cd-a045-59af90473172) New New New ds_transition to WaitActive
51622023-09-22T23:15:07.780ZINFOcrucible: [0] Transition from New to WaitActive
51632023-09-22T23:15:07.781ZINFOcrucible: [0] 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11 (cc35b5ef-fbcb-41cd-a045-59af90473172) WaitActive New New ds_transition to WaitQuorum
51642023-09-22T23:15:07.781ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51652023-09-22T23:15:07.781ZINFOcrucible: [0] 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11 (cc35b5ef-fbcb-41cd-a045-59af90473172) WaitQuorum New New ds_transition to Active
51662023-09-22T23:15:07.781ZINFOcrucible: [0] Transition from WaitQuorum to Active
51672023-09-22T23:15:07.781ZINFOcrucible: [0] 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11 (cc35b5ef-fbcb-41cd-a045-59af90473172) Active New New ds_transition to Offline
51682023-09-22T23:15:07.781ZINFOcrucible: [0] Transition from Active to Offline
51692023-09-22T23:15:07.781ZINFOcrucible: [0] 7771bb8d-d0f4-46b1-a44d-c23c12ccdd11 (cc35b5ef-fbcb-41cd-a045-59af90473172) Offline New New ds_transition to WaitQuorum
5170 test test::up_test::downstairs_transition_bad_offline - should panic ... ok
51712023-09-22T23:15:07.781ZINFOcrucible: Crucible stats registered with UUID: 57c8db34-e48b-4a7d-83f3-8a88fa31a197
51722023-09-22T23:15:07.781ZINFOcrucible: Crucible 57c8db34-e48b-4a7d-83f3-8a88fa31a197 has session id: 9a11e2c8-b8a9-48a3-9432-ed0899849ed4
51732023-09-22T23:15:07.781ZINFOcrucible: [0] 57c8db34-e48b-4a7d-83f3-8a88fa31a197 (a7c8ef31-2f13-4be9-afc2-5125fa5aa8f3) New New New ds_transition to Replay
5174 test test::up_test::downstairs_transition_bad_replay - should panic ... ok
51752023-09-22T23:15:07.782ZINFOcrucible: Crucible stats registered with UUID: f7f80cb8-0fc7-47d1-b713-24a9a37d79c7
51762023-09-22T23:15:07.782ZINFOcrucible: Crucible f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 has session id: 3ef0b24e-68a7-442d-ab20-dfb0ad2f2356
51772023-09-22T23:15:07.782ZINFOcrucible: [0] f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 (31b01c30-322a-49cd-af40-24c12e9ce431) New New New ds_transition to WaitActive
51782023-09-22T23:15:07.782ZINFOcrucible: [0] Transition from New to WaitActive
51792023-09-22T23:15:07.782ZINFOcrucible: [0] f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 (31b01c30-322a-49cd-af40-24c12e9ce431) WaitActive New New ds_transition to WaitQuorum
51802023-09-22T23:15:07.782ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51812023-09-22T23:15:07.782ZINFOcrucible: [0] f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 (31b01c30-322a-49cd-af40-24c12e9ce431) WaitQuorum New New ds_transition to Active
51822023-09-22T23:15:07.782ZINFOcrucible: [0] Transition from WaitQuorum to Active
51832023-09-22T23:15:07.782ZINFOcrucible: f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 is now active with session: 31b01c30-322a-49cd-af40-24c12e9ce431
51842023-09-22T23:15:07.782ZINFOcrucible: [0] f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 (31b01c30-322a-49cd-af40-24c12e9ce431) Active New New ds_transition to Deactivated
51852023-09-22T23:15:07.782ZINFOcrucible: [0] Transition from Active to Deactivated
51862023-09-22T23:15:07.782ZINFOcrucible: [0] f7f80cb8-0fc7-47d1-b713-24a9a37d79c7 (31b01c30-322a-49cd-af40-24c12e9ce431) Deactivated New New ds_transition to New
51872023-09-22T23:15:07.782ZINFOcrucible: [0] Transition from Deactivated to New
5188 test test::up_test::downstairs_transition_deactivate_new ... ok
51892023-09-22T23:15:07.783ZINFOcrucible: Crucible stats registered with UUID: 86e75c64-dac3-4542-a004-9168c4b81811
51902023-09-22T23:15:07.783ZINFOcrucible: Crucible 86e75c64-dac3-4542-a004-9168c4b81811 has session id: 91a1f387-1762-4a79-a507-3646b8ac025b
51912023-09-22T23:15:07.783ZINFOcrucible: [0] 86e75c64-dac3-4542-a004-9168c4b81811 (3ad53d45-4130-4e32-8e58-d7dad4efdd80) New New New ds_transition to Deactivated
5192 test test::up_test::downstairs_transition_deactivate_not_new - should panic ... ok
51932023-09-22T23:15:07.783ZINFOcrucible: Crucible stats registered with UUID: 6f79ea9c-ea43-40f6-b86a-7c378a6b456e
51942023-09-22T23:15:07.783ZINFOcrucible: Crucible 6f79ea9c-ea43-40f6-b86a-7c378a6b456e has session id: be30719f-ba18-48fa-842d-91f3849e6905
51952023-09-22T23:15:07.783ZINFOcrucible: [0] 6f79ea9c-ea43-40f6-b86a-7c378a6b456e (6959e13a-880e-4f13-914a-db664a6b3a0a) New New New ds_transition to WaitActive
51962023-09-22T23:15:07.783ZINFOcrucible: [0] Transition from New to WaitActive
51972023-09-22T23:15:07.783ZINFOcrucible: [0] 6f79ea9c-ea43-40f6-b86a-7c378a6b456e (6959e13a-880e-4f13-914a-db664a6b3a0a) WaitActive New New ds_transition to Deactivated
5198 test test::up_test::downstairs_transition_deactivate_not_wa - should panic ... ok
51992023-09-22T23:15:07.784ZINFOcrucible: Crucible stats registered with UUID: 44c75964-0e56-4296-94f8-97ceb80595ec
52002023-09-22T23:15:07.784ZINFOcrucible: Crucible 44c75964-0e56-4296-94f8-97ceb80595ec has session id: f3323cf3-fddd-45ec-aba7-f6577a8d0005
52012023-09-22T23:15:07.784ZINFOcrucible: [0] 44c75964-0e56-4296-94f8-97ceb80595ec (faa4d97e-b60a-4cc5-b158-d6ae6c9a634b) New New New ds_transition to WaitActive
52022023-09-22T23:15:07.784ZINFOcrucible: [0] Transition from New to WaitActive
52032023-09-22T23:15:07.784ZINFOcrucible: [0] 44c75964-0e56-4296-94f8-97ceb80595ec (faa4d97e-b60a-4cc5-b158-d6ae6c9a634b) WaitActive New New ds_transition to WaitQuorum
52042023-09-22T23:15:07.784ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52052023-09-22T23:15:07.784ZINFOcrucible: [0] 44c75964-0e56-4296-94f8-97ceb80595ec (faa4d97e-b60a-4cc5-b158-d6ae6c9a634b) WaitQuorum New New ds_transition to Deactivated
5206 test test::up_test::downstairs_transition_deactivate_not_wq - should panic ... ok
52072023-09-22T23:15:07.785ZINFOcrucible: Crucible stats registered with UUID: 59cafd00-d67f-4bd7-af30-6e9787cb43f7
52082023-09-22T23:15:07.785ZINFOcrucible: Crucible 59cafd00-d67f-4bd7-af30-6e9787cb43f7 has session id: 04121b65-e7e7-4091-ba99-7696e7e4f0c2
52092023-09-22T23:15:07.785ZINFOcrucible: [0] 59cafd00-d67f-4bd7-af30-6e9787cb43f7 (1e7f6f83-82c5-49a5-9834-77da5dc3e264) New New New ds_transition to WaitActive
52102023-09-22T23:15:07.785ZINFOcrucible: [0] Transition from New to WaitActive
52112023-09-22T23:15:07.785ZINFOcrucible: [0] 59cafd00-d67f-4bd7-af30-6e9787cb43f7 (1e7f6f83-82c5-49a5-9834-77da5dc3e264) WaitActive New New ds_transition to WaitQuorum
52122023-09-22T23:15:07.785ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52132023-09-22T23:15:07.785ZINFOcrucible: [0] 59cafd00-d67f-4bd7-af30-6e9787cb43f7 (1e7f6f83-82c5-49a5-9834-77da5dc3e264) WaitQuorum New New ds_transition to Deactivated
5214 test test::up_test::downstairs_transition_disconnect_no_active - should panic ... ok
52152023-09-22T23:15:07.786ZINFOcrucible: Crucible stats registered with UUID: bfd19920-0b05-47a5-828b-205cd56f5687
52162023-09-22T23:15:07.786ZINFOcrucible: Crucible bfd19920-0b05-47a5-828b-205cd56f5687 has session id: e96c1960-330a-4a85-929e-b1de77ccfa65
52172023-09-22T23:15:07.786ZINFOcrucible: [0] bfd19920-0b05-47a5-828b-205cd56f5687 (f3b84223-32dd-4976-ae7b-aa4c14635261) New New New ds_transition to Offline
5218 test test::up_test::downstairs_transition_no_new_to_offline - should panic ... ok
52192023-09-22T23:15:07.786ZINFOcrucible: Crucible stats registered with UUID: 7abd282f-4431-4a20-95df-54ccb48a0671
52202023-09-22T23:15:07.786ZINFOcrucible: Crucible 7abd282f-4431-4a20-95df-54ccb48a0671 has session id: 686b946e-3de7-4d48-b606-577081f18781
52212023-09-22T23:15:07.786ZINFOcrucible: [0] 7abd282f-4431-4a20-95df-54ccb48a0671 (07b84a96-4838-43f4-bc72-cd6ff41826b6) New New New ds_transition to WaitActive
52222023-09-22T23:15:07.786ZINFOcrucible: [0] Transition from New to WaitActive
52232023-09-22T23:15:07.786ZINFOcrucible: [0] 7abd282f-4431-4a20-95df-54ccb48a0671 (07b84a96-4838-43f4-bc72-cd6ff41826b6) WaitActive New New ds_transition to WaitQuorum
52242023-09-22T23:15:07.786ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52252023-09-22T23:15:07.786ZINFOcrucible: [0] 7abd282f-4431-4a20-95df-54ccb48a0671 (07b84a96-4838-43f4-bc72-cd6ff41826b6) WaitQuorum New New ds_transition to Active
52262023-09-22T23:15:07.786ZINFOcrucible: [0] Transition from WaitQuorum to Active
5227 test test::up_test::downstairs_transition_normal ... ok
52282023-09-22T23:15:07.787ZINFOcrucible: Crucible stats registered with UUID: 2752c185-b27a-47b3-80bf-ed0f9e6e8a44
52292023-09-22T23:15:07.787ZINFOcrucible: Crucible 2752c185-b27a-47b3-80bf-ed0f9e6e8a44 has session id: 8466f959-c80c-4506-a44e-83ebc36b1ddc
52302023-09-22T23:15:07.787ZINFOcrucible: [0] 2752c185-b27a-47b3-80bf-ed0f9e6e8a44 (f5db4d2d-bf63-4549-8328-92ea47aff629) New New New ds_transition to WaitActive
52312023-09-22T23:15:07.787ZINFOcrucible: [0] Transition from New to WaitActive
52322023-09-22T23:15:07.787ZINFOcrucible: [0] 2752c185-b27a-47b3-80bf-ed0f9e6e8a44 (f5db4d2d-bf63-4549-8328-92ea47aff629) WaitActive New New ds_transition to WaitQuorum
52332023-09-22T23:15:07.787ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52342023-09-22T23:15:07.787ZINFOcrucible: [0] 2752c185-b27a-47b3-80bf-ed0f9e6e8a44 (f5db4d2d-bf63-4549-8328-92ea47aff629) WaitQuorum New New ds_transition to Active
52352023-09-22T23:15:07.787ZINFOcrucible: [0] Transition from WaitQuorum to Active
52362023-09-22T23:15:07.787ZINFOcrucible: [0] 2752c185-b27a-47b3-80bf-ed0f9e6e8a44 (f5db4d2d-bf63-4549-8328-92ea47aff629) Active New New ds_transition to Offline
52372023-09-22T23:15:07.787ZINFOcrucible: [0] Transition from Active to Offline
52382023-09-22T23:15:07.787ZINFOcrucible: [0] 2752c185-b27a-47b3-80bf-ed0f9e6e8a44 (f5db4d2d-bf63-4549-8328-92ea47aff629) Offline New New ds_transition to Active
5239 test test::up_test::downstairs_transition_offline_no_active - should panic ... ok
52402023-09-22T23:15:07.788ZINFOcrucible: Crucible stats registered with UUID: e8eaa90b-d486-4bbb-8e96-dccd88d40eb6
52412023-09-22T23:15:07.788ZINFOcrucible: Crucible e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 has session id: b7b7d676-a503-4725-abc6-176a68860526
52422023-09-22T23:15:07.788ZINFOcrucible: [0] e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 (23b59aff-5c96-4189-9cb9-518bc8aaa60a) New New New ds_transition to WaitActive
52432023-09-22T23:15:07.788ZINFOcrucible: [0] Transition from New to WaitActive
52442023-09-22T23:15:07.788ZINFOcrucible: [0] e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 (23b59aff-5c96-4189-9cb9-518bc8aaa60a) WaitActive New New ds_transition to WaitQuorum
52452023-09-22T23:15:07.788ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52462023-09-22T23:15:07.788ZINFOcrucible: e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 is now active with session: 23b59aff-5c96-4189-9cb9-518bc8aaa60a
52472023-09-22T23:15:07.788ZINFOcrucible: [0] e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 (23b59aff-5c96-4189-9cb9-518bc8aaa60a) WaitQuorum New New ds_transition to Active
52482023-09-22T23:15:07.788ZINFOcrucible: [0] Transition from WaitQuorum to Active
52492023-09-22T23:15:07.788ZINFOcrucible: [0] e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 (23b59aff-5c96-4189-9cb9-518bc8aaa60a) Active New New ds_transition to Offline
52502023-09-22T23:15:07.788ZINFOcrucible: [0] Transition from Active to Offline
52512023-09-22T23:15:07.788ZINFOcrucible: [0] e8eaa90b-d486-4bbb-8e96-dccd88d40eb6 (23b59aff-5c96-4189-9cb9-518bc8aaa60a) Offline New New ds_transition to Replay
52522023-09-22T23:15:07.788ZINFOcrucible: [0] Transition from Offline to Replay
5253 test test::up_test::downstairs_transition_replay ... ok
52542023-09-22T23:15:07.789ZINFOcrucible: Crucible stats registered with UUID: 6e5e04a4-4f4b-4d45-b79f-e4d4b7899dc2
52552023-09-22T23:15:07.789ZINFOcrucible: Crucible 6e5e04a4-4f4b-4d45-b79f-e4d4b7899dc2 has session id: 501a0f67-5e4d-4a28-bad0-20d91ec74a06
52562023-09-22T23:15:07.789ZINFOcrucible: [0] 6e5e04a4-4f4b-4d45-b79f-e4d4b7899dc2 (dace7cdf-410a-447a-a581-1976a197f1a8) New New New ds_transition to WaitActive
52572023-09-22T23:15:07.789ZINFOcrucible: [0] Transition from New to WaitActive
52582023-09-22T23:15:07.789ZINFOcrucible: [0] 6e5e04a4-4f4b-4d45-b79f-e4d4b7899dc2 (dace7cdf-410a-447a-a581-1976a197f1a8) WaitActive New New ds_transition to WaitQuorum
52592023-09-22T23:15:07.789ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52602023-09-22T23:15:07.789ZINFOcrucible: [0] 6e5e04a4-4f4b-4d45-b79f-e4d4b7899dc2 (dace7cdf-410a-447a-a581-1976a197f1a8) WaitQuorum New New ds_transition to Active
52612023-09-22T23:15:07.789ZINFOcrucible: [0] Transition from WaitQuorum to Active
52622023-09-22T23:15:07.789ZINFOcrucible: [0] 6e5e04a4-4f4b-4d45-b79f-e4d4b7899dc2 (dace7cdf-410a-447a-a581-1976a197f1a8) Active New New ds_transition to Active
5263 test test::up_test::downstairs_transition_same_active - should panic ... ok
52642023-09-22T23:15:07.789ZINFOcrucible: Crucible stats registered with UUID: 3e38a6a4-5ad1-4517-9c5a-ee2835e33962
52652023-09-22T23:15:07.789ZINFOcrucible: Crucible 3e38a6a4-5ad1-4517-9c5a-ee2835e33962 has session id: 31fa5b18-879c-403d-a5f4-6e235696282e
52662023-09-22T23:15:07.789ZINFOcrucible: [0] 3e38a6a4-5ad1-4517-9c5a-ee2835e33962 (b372a068-e3b2-4f5c-a122-c7947a92b044) New New New ds_transition to WaitActive
52672023-09-22T23:15:07.789ZINFOcrucible: [0] Transition from New to WaitActive
52682023-09-22T23:15:07.789ZINFOcrucible: [0] 3e38a6a4-5ad1-4517-9c5a-ee2835e33962 (b372a068-e3b2-4f5c-a122-c7947a92b044) WaitActive New New ds_transition to WaitQuorum
52692023-09-22T23:15:07.789ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52702023-09-22T23:15:07.790ZINFOcrucible: [0] 3e38a6a4-5ad1-4517-9c5a-ee2835e33962 (b372a068-e3b2-4f5c-a122-c7947a92b044) WaitQuorum New New ds_transition to Active
52712023-09-22T23:15:07.790ZINFOcrucible: [0] Transition from WaitQuorum to Active
52722023-09-22T23:15:07.790ZINFOcrucible: [0] 3e38a6a4-5ad1-4517-9c5a-ee2835e33962 (b372a068-e3b2-4f5c-a122-c7947a92b044) Active New New ds_transition to Offline
52732023-09-22T23:15:07.790ZINFOcrucible: [0] Transition from Active to Offline
52742023-09-22T23:15:07.790ZINFOcrucible: [0] 3e38a6a4-5ad1-4517-9c5a-ee2835e33962 (b372a068-e3b2-4f5c-a122-c7947a92b044) Offline New New ds_transition to Offline
5275 test test::up_test::downstairs_transition_same_offline - should panic ... ok
52762023-09-22T23:15:07.790ZINFOcrucible: Crucible stats registered with UUID: 38a9c65e-bedc-4ca2-b716-2583bb874a36
52772023-09-22T23:15:07.790ZINFOcrucible: Crucible 38a9c65e-bedc-4ca2-b716-2583bb874a36 has session id: b134edeb-e702-4f4c-b1f8-c009dbb4c307
52782023-09-22T23:15:07.790ZINFOcrucible: [0] 38a9c65e-bedc-4ca2-b716-2583bb874a36 (c2236e94-8c87-4dfa-8447-5bbef3843952) New New New ds_transition to WaitActive
52792023-09-22T23:15:07.790ZINFOcrucible: [0] Transition from New to WaitActive
52802023-09-22T23:15:07.790ZINFOcrucible: [0] 38a9c65e-bedc-4ca2-b716-2583bb874a36 (c2236e94-8c87-4dfa-8447-5bbef3843952) WaitActive New New ds_transition to WaitActive
5281 test test::up_test::downstairs_transition_same_wa - should panic ... ok
52822023-09-22T23:15:07.791ZINFOcrucible: Crucible stats registered with UUID: 2e0b2db2-d735-47ea-9950-19328964ab66
52832023-09-22T23:15:07.791ZINFOcrucible: Crucible 2e0b2db2-d735-47ea-9950-19328964ab66 has session id: c4172829-b2a6-423c-b24c-512ca4f30acf
52842023-09-22T23:15:07.791ZINFOcrucible: [0] 2e0b2db2-d735-47ea-9950-19328964ab66 (daf5c384-9e01-4130-a91f-ebf2171835bb) New New New ds_transition to WaitActive
52852023-09-22T23:15:07.791ZINFOcrucible: [0] Transition from New to WaitActive
52862023-09-22T23:15:07.791ZINFOcrucible: [0] 2e0b2db2-d735-47ea-9950-19328964ab66 (daf5c384-9e01-4130-a91f-ebf2171835bb) WaitActive New New ds_transition to WaitQuorum
52872023-09-22T23:15:07.791ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52882023-09-22T23:15:07.791ZINFOcrucible: [0] 2e0b2db2-d735-47ea-9950-19328964ab66 (daf5c384-9e01-4130-a91f-ebf2171835bb) WaitQuorum New New ds_transition to WaitQuorum
5289 test test::up_test::downstairs_transition_same_wq - should panic ... ok
52902023-09-22T23:15:07.792ZINFOcrucible: Crucible stats registered with UUID: 43ece84c-fadd-47ba-bae7-1f003b9b46ee
52912023-09-22T23:15:07.792ZINFOcrucible: Crucible 43ece84c-fadd-47ba-bae7-1f003b9b46ee has session id: 78eb4c19-2f8c-4c5e-9d45-53e542429b06
52922023-09-22T23:15:07.792ZINFOcrucible: [0] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) New New New ds_transition to WaitActive
52932023-09-22T23:15:07.792ZINFOcrucible: [0] Transition from New to WaitActive
52942023-09-22T23:15:07.792ZINFOcrucible: [0] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) WaitActive New New ds_transition to WaitQuorum
52952023-09-22T23:15:07.792ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52962023-09-22T23:15:07.792ZINFOcrucible: [0] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) WaitQuorum New New ds_transition to Active
52972023-09-22T23:15:07.792ZINFOcrucible: [0] Transition from WaitQuorum to Active
52982023-09-22T23:15:07.792ZINFOcrucible: [1] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active New New ds_transition to WaitActive
52992023-09-22T23:15:07.792ZINFOcrucible: [1] Transition from New to WaitActive
53002023-09-22T23:15:07.792ZINFOcrucible: [1] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active WaitActive New ds_transition to WaitQuorum
53012023-09-22T23:15:07.792ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53022023-09-22T23:15:07.792ZINFOcrucible: [1] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active WaitQuorum New ds_transition to Active
53032023-09-22T23:15:07.792ZINFOcrucible: [1] Transition from WaitQuorum to Active
53042023-09-22T23:15:07.792ZINFOcrucible: [2] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active Active New ds_transition to WaitActive
53052023-09-22T23:15:07.792ZINFOcrucible: [2] Transition from New to WaitActive
53062023-09-22T23:15:07.792ZINFOcrucible: [2] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active Active WaitActive ds_transition to WaitQuorum
53072023-09-22T23:15:07.792ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
53082023-09-22T23:15:07.792ZINFOcrucible: [2] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active Active WaitQuorum ds_transition to Active
53092023-09-22T23:15:07.792ZINFOcrucible: [2] Transition from WaitQuorum to Active
53102023-09-22T23:15:07.792ZINFOcrucible: 43ece84c-fadd-47ba-bae7-1f003b9b46ee is now active with session: 4863f532-0cf8-4602-bd3c-3435618d09b9
53112023-09-22T23:15:07.792ZINFOcrucible: [0] 43ece84c-fadd-47ba-bae7-1f003b9b46ee (4863f532-0cf8-4602-bd3c-3435618d09b9) Active Active Active ds_transition to Faulted
53122023-09-22T23:15:07.792ZINFOcrucible: [0] Transition from Active to Faulted
5313 test test::up_test::faulted_downstairs_skips_but_still_does_work ... ok
53142023-09-22T23:15:07.793ZINFOcrucible: Crucible stats registered with UUID: a8cb0ee1-4130-4193-8ea1-a0aa4ed74780
53152023-09-22T23:15:07.793ZINFOcrucible: Crucible a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 has session id: 1913e096-ef3f-4674-a90d-74a2a7f0d5dd
53162023-09-22T23:15:07.793ZINFOcrucible: [0] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) New New New ds_transition to WaitActive
53172023-09-22T23:15:07.793ZINFOcrucible: [0] Transition from New to WaitActive
53182023-09-22T23:15:07.793ZINFOcrucible: [0] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) WaitActive New New ds_transition to WaitQuorum
53192023-09-22T23:15:07.793ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53202023-09-22T23:15:07.793ZINFOcrucible: [0] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) WaitQuorum New New ds_transition to Active
53212023-09-22T23:15:07.793ZINFOcrucible: [0] Transition from WaitQuorum to Active
53222023-09-22T23:15:07.793ZINFOcrucible: [1] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active New New ds_transition to WaitActive
53232023-09-22T23:15:07.793ZINFOcrucible: [1] Transition from New to WaitActive
53242023-09-22T23:15:07.793ZINFOcrucible: [1] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active WaitActive New ds_transition to WaitQuorum
53252023-09-22T23:15:07.793ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53262023-09-22T23:15:07.793ZINFOcrucible: [1] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active WaitQuorum New ds_transition to Active
53272023-09-22T23:15:07.793ZINFOcrucible: [1] Transition from WaitQuorum to Active
53282023-09-22T23:15:07.793ZINFOcrucible: [2] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active Active New ds_transition to WaitActive
53292023-09-22T23:15:07.793ZINFOcrucible: [2] Transition from New to WaitActive
53302023-09-22T23:15:07.793ZINFOcrucible: [2] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active Active WaitActive ds_transition to WaitQuorum
53312023-09-22T23:15:07.793ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
53322023-09-22T23:15:07.793ZINFOcrucible: [2] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active Active WaitQuorum ds_transition to Active
53332023-09-22T23:15:07.793ZINFOcrucible: [2] Transition from WaitQuorum to Active
53342023-09-22T23:15:07.793ZINFOcrucible: a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 is now active with session: 9ccf2467-2665-45a9-86f3-6dee7a59fac5
53352023-09-22T23:15:07.793ZINFOcrucible: [0] a8cb0ee1-4130-4193-8ea1-a0aa4ed74780 (9ccf2467-2665-45a9-86f3-6dee7a59fac5) Active Active Active ds_transition to Faulted
53362023-09-22T23:15:07.793ZINFOcrucible: [0] Transition from Active to Faulted
5337 test test::up_test::faulted_downstairs_skips_work ... ok
53382023-09-22T23:15:07.794ZINFOcrucible: Crucible stats registered with UUID: 3dd83520-4b07-4cfa-9bd0-add451f7ea4c
53392023-09-22T23:15:07.794ZINFOcrucible: Crucible 3dd83520-4b07-4cfa-9bd0-add451f7ea4c has session id: 24c704a8-cffe-4af2-8011-77aa3d444870
53402023-09-22T23:15:07.794ZINFOcrucible: 3dd83520-4b07-4cfa-9bd0-add451f7ea4c is now active with session: 329df8b1-93de-44e0-b9b2-22eb23edd7b6
53412023-09-22T23:15:07.794ZINFOcrucible: [0] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) New New New ds_transition to WaitActive
53422023-09-22T23:15:07.794ZINFOcrucible: [0] Transition from New to WaitActive
53432023-09-22T23:15:07.794ZINFOcrucible: [0] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) WaitActive New New ds_transition to WaitQuorum
53442023-09-22T23:15:07.794ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53452023-09-22T23:15:07.794ZINFOcrucible: [0] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) WaitQuorum New New ds_transition to Active
53462023-09-22T23:15:07.794ZINFOcrucible: [0] Transition from WaitQuorum to Active
53472023-09-22T23:15:07.794ZINFOcrucible: [1] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active New New ds_transition to WaitActive
53482023-09-22T23:15:07.794ZINFOcrucible: [1] Transition from New to WaitActive
53492023-09-22T23:15:07.794ZINFOcrucible: [1] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active WaitActive New ds_transition to WaitQuorum
53502023-09-22T23:15:07.794ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53512023-09-22T23:15:07.794ZINFOcrucible: [1] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active WaitQuorum New ds_transition to Active
53522023-09-22T23:15:07.794ZINFOcrucible: [1] Transition from WaitQuorum to Active
53532023-09-22T23:15:07.794ZINFOcrucible: [2] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active Active New ds_transition to WaitActive
53542023-09-22T23:15:07.794ZINFOcrucible: [2] Transition from New to WaitActive
53552023-09-22T23:15:07.794ZINFOcrucible: [2] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active Active WaitActive ds_transition to WaitQuorum
53562023-09-22T23:15:07.794ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
53572023-09-22T23:15:07.794ZINFOcrucible: [2] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active Active WaitQuorum ds_transition to Active
53582023-09-22T23:15:07.794ZINFOcrucible: [2] Transition from WaitQuorum to Active
53592023-09-22T23:15:07.794ZINFOcrucible: [1] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active Active Active ds_transition to Faulted
53602023-09-22T23:15:07.794ZINFOcrucible: [1] Transition from Active to Faulted
53612023-09-22T23:15:07.794ZINFOcrucible: [2] 3dd83520-4b07-4cfa-9bd0-add451f7ea4c (329df8b1-93de-44e0-b9b2-22eb23edd7b6) Active Faulted Active ds_transition to Faulted
53622023-09-22T23:15:07.794ZINFOcrucible: [2] Transition from Active to Faulted
5363 test test::up_test::flush_io_double_skip ... ok
53642023-09-22T23:15:07.795ZINFOcrucible: Crucible stats registered with UUID: 64fac11f-725d-4d02-bf9e-048fc18f4c91
53652023-09-22T23:15:07.795ZINFOcrucible: Crucible 64fac11f-725d-4d02-bf9e-048fc18f4c91 has session id: e5af5ee5-91a0-49c0-ae2b-b29606b8f641
53662023-09-22T23:15:07.795ZINFOcrucible: 64fac11f-725d-4d02-bf9e-048fc18f4c91 is now active with session: 460cda23-2385-4edd-840e-6c135a560abf
53672023-09-22T23:15:07.795ZINFOcrucible: [0] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) New New New ds_transition to WaitActive
53682023-09-22T23:15:07.795ZINFOcrucible: [0] Transition from New to WaitActive
53692023-09-22T23:15:07.795ZINFOcrucible: [0] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) WaitActive New New ds_transition to WaitQuorum
53702023-09-22T23:15:07.795ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53712023-09-22T23:15:07.795ZINFOcrucible: [0] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) WaitQuorum New New ds_transition to Active
53722023-09-22T23:15:07.795ZINFOcrucible: [0] Transition from WaitQuorum to Active
53732023-09-22T23:15:07.795ZINFOcrucible: [1] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active New New ds_transition to WaitActive
53742023-09-22T23:15:07.795ZINFOcrucible: [1] Transition from New to WaitActive
53752023-09-22T23:15:07.795ZINFOcrucible: [1] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active WaitActive New ds_transition to WaitQuorum
53762023-09-22T23:15:07.795ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53772023-09-22T23:15:07.795ZINFOcrucible: [1] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active WaitQuorum New ds_transition to Active
53782023-09-22T23:15:07.795ZINFOcrucible: [1] Transition from WaitQuorum to Active
53792023-09-22T23:15:07.795ZINFOcrucible: [2] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active Active New ds_transition to WaitActive
53802023-09-22T23:15:07.795ZINFOcrucible: [2] Transition from New to WaitActive
53812023-09-22T23:15:07.795ZINFOcrucible: [2] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active Active WaitActive ds_transition to WaitQuorum
53822023-09-22T23:15:07.795ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
53832023-09-22T23:15:07.795ZINFOcrucible: [2] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active Active WaitQuorum ds_transition to Active
53842023-09-22T23:15:07.795ZINFOcrucible: [2] Transition from WaitQuorum to Active
53852023-09-22T23:15:07.795ZINFOcrucible: [0] 64fac11f-725d-4d02-bf9e-048fc18f4c91 (460cda23-2385-4edd-840e-6c135a560abf) Active Active Active ds_transition to Faulted
53862023-09-22T23:15:07.795ZINFOcrucible: [0] Transition from Active to Faulted
53872023-09-22T23:15:07.795ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Flush { dependencies: [], flush_number: 22, gen_number: 11, snapshot_details: None, extent_limit: None }, state: ClientData([Skipped, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
53882023-09-22T23:15:07.795ZERROcrucible: [1] Reports error GenericError("bad") on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Flush { dependencies: [], flush_number: 22, gen_number: 11, snapshot_details: None, extent_limit: None }, state: ClientData([Skipped, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
5389 test test::up_test::flush_io_fail_and_skip ... ok
53902023-09-22T23:15:07.796ZINFOcrucible: Crucible stats registered with UUID: 90e4abfa-c634-471a-a9d8-50c07c1dff79
53912023-09-22T23:15:07.796ZINFOcrucible: Crucible 90e4abfa-c634-471a-a9d8-50c07c1dff79 has session id: 3fb10cc9-f589-4d77-b07a-c737e26169db
53922023-09-22T23:15:07.796ZINFOcrucible: 90e4abfa-c634-471a-a9d8-50c07c1dff79 is now active with session: e7c36b35-a726-4f02-a6b2-b774d20209eb
53932023-09-22T23:15:07.796ZINFOcrucible: [0] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) New New New ds_transition to WaitActive
53942023-09-22T23:15:07.796ZINFOcrucible: [0] Transition from New to WaitActive
53952023-09-22T23:15:07.796ZINFOcrucible: [0] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) WaitActive New New ds_transition to WaitQuorum
53962023-09-22T23:15:07.796ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53972023-09-22T23:15:07.796ZINFOcrucible: [0] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) WaitQuorum New New ds_transition to Active
53982023-09-22T23:15:07.796ZINFOcrucible: [0] Transition from WaitQuorum to Active
53992023-09-22T23:15:07.796ZINFOcrucible: [1] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active New New ds_transition to WaitActive
54002023-09-22T23:15:07.796ZINFOcrucible: [1] Transition from New to WaitActive
54012023-09-22T23:15:07.796ZINFOcrucible: [1] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active WaitActive New ds_transition to WaitQuorum
54022023-09-22T23:15:07.796ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
54032023-09-22T23:15:07.796ZINFOcrucible: [1] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active WaitQuorum New ds_transition to Active
54042023-09-22T23:15:07.796ZINFOcrucible: [1] Transition from WaitQuorum to Active
54052023-09-22T23:15:07.796ZINFOcrucible: [2] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active Active New ds_transition to WaitActive
54062023-09-22T23:15:07.796ZINFOcrucible: [2] Transition from New to WaitActive
54072023-09-22T23:15:07.796ZINFOcrucible: [2] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active Active WaitActive ds_transition to WaitQuorum
54082023-09-22T23:15:07.796ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
54092023-09-22T23:15:07.796ZINFOcrucible: [2] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active Active WaitQuorum ds_transition to Active
54102023-09-22T23:15:07.796ZINFOcrucible: [2] Transition from WaitQuorum to Active
54112023-09-22T23:15:07.796ZINFOcrucible: [1] 90e4abfa-c634-471a-a9d8-50c07c1dff79 (e7c36b35-a726-4f02-a6b2-b774d20209eb) Active Active Active ds_transition to Faulted
54122023-09-22T23:15:07.796ZINFOcrucible: [1] Transition from Active to Faulted
5413 test test::up_test::flush_io_single_skip ... ok
54142023-09-22T23:15:07.797ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54152023-09-22T23:15:07.797ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 061eba66-4173-4774-97fd-aadd36dd85d5
5416 test test::up_test::not_right_block_size - should panic ... ok
54172023-09-22T23:15:07.798ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54182023-09-22T23:15:07.798ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 7fefaa67-fc26-4742-b884-35f4bb7d8abb
5419 test test::up_test::off_to_extent_bridge ... ok
54202023-09-22T23:15:07.800ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54212023-09-22T23:15:07.800ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 8248d1ce-196b-4603-b2f3-860a3a1dd925
5422 test test::up_test::off_to_extent_length_almost_too_big ... ok
54232023-09-22T23:15:07.801ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54242023-09-22T23:15:07.801ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 9914ac4f-aa60-463d-8702-ed1f285919e5
5425 test test::up_test::off_to_extent_length_and_offset_almost_too_big ... ok
54262023-09-22T23:15:07.802ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54272023-09-22T23:15:07.802ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: c00d0775-ab92-4694-aebf-86174f45c437
5428 test test::up_test::off_to_extent_length_and_offset_too_big - should panic ... ok
54292023-09-22T23:15:07.802ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54302023-09-22T23:15:07.802ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 595854ca-954b-4521-bebb-1bb6a0b4e9a8
5431 test test::up_test::off_to_extent_length_too_big - should panic ... ok
54322023-09-22T23:15:07.803ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54332023-09-22T23:15:07.803ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: f265562f-d096-464e-8514-7cb7b25a1eb7
5434 test test::up_test::off_to_extent_length_zero ... ok
54352023-09-22T23:15:07.804ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54362023-09-22T23:15:07.804ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 327f36c8-3c7d-4616-a13a-d2ffb802df07
5437 test test::up_test::off_to_extent_one_block ... ok
54382023-09-22T23:15:07.805ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54392023-09-22T23:15:07.805ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 8d094510-cdd3-415e-b451-8328a5ee0443
5440 test test::up_test::off_to_extent_two_blocks ... ok
54412023-09-22T23:15:07.806ZINFOcrucible: Crucible stats registered with UUID: 0a5ceb5e-a244-4fe3-9861-83ccd3099205
54422023-09-22T23:15:07.806ZINFOcrucible: Crucible 0a5ceb5e-a244-4fe3-9861-83ccd3099205 has session id: 1b2a4902-676a-4e1a-87e3-cb0e7c3cbe08
54432023-09-22T23:15:07.806ZINFOcrucible: [0] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) New New New ds_transition to WaitActive
54442023-09-22T23:15:07.806ZINFOcrucible: [0] Transition from New to WaitActive
54452023-09-22T23:15:07.806ZINFOcrucible: [0] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) WaitActive New New ds_transition to WaitQuorum
54462023-09-22T23:15:07.806ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
54472023-09-22T23:15:07.806ZINFOcrucible: [0] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) WaitQuorum New New ds_transition to Active
54482023-09-22T23:15:07.806ZINFOcrucible: [0] Transition from WaitQuorum to Active
54492023-09-22T23:15:07.806ZINFOcrucible: [1] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active New New ds_transition to WaitActive
54502023-09-22T23:15:07.806ZINFOcrucible: [1] Transition from New to WaitActive
54512023-09-22T23:15:07.806ZINFOcrucible: [1] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active WaitActive New ds_transition to WaitQuorum
54522023-09-22T23:15:07.806ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
54532023-09-22T23:15:07.806ZINFOcrucible: [1] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active WaitQuorum New ds_transition to Active
54542023-09-22T23:15:07.806ZINFOcrucible: [1] Transition from WaitQuorum to Active
54552023-09-22T23:15:07.806ZINFOcrucible: [2] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active Active New ds_transition to WaitActive
54562023-09-22T23:15:07.806ZINFOcrucible: [2] Transition from New to WaitActive
54572023-09-22T23:15:07.806ZINFOcrucible: [2] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active Active WaitActive ds_transition to WaitQuorum
54582023-09-22T23:15:07.806ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
54592023-09-22T23:15:07.806ZINFOcrucible: [2] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active Active WaitQuorum ds_transition to Active
54602023-09-22T23:15:07.806ZINFOcrucible: [2] Transition from WaitQuorum to Active
54612023-09-22T23:15:07.806ZINFOcrucible: 0a5ceb5e-a244-4fe3-9861-83ccd3099205 is now active with session: a60075df-ec91-455d-8fe1-c145ba0f2a55
54622023-09-22T23:15:07.806ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
54632023-09-22T23:15:07.806ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
54642023-09-22T23:15:07.806ZINFOcrucible: [0] client skip 1 in process jobs because fault = downstairs
54652023-09-22T23:15:07.806ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
54662023-09-22T23:15:07.806ZINFOcrucible: [0] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Active Active Active ds_transition to Faulted
54672023-09-22T23:15:07.806ZINFOcrucible: [0] Transition from Active to Faulted
54682023-09-22T23:15:07.806ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
54692023-09-22T23:15:07.806ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
54702023-09-22T23:15:07.806ZINFOcrucible: [1] client skip 1 in process jobs because fault = downstairs
54712023-09-22T23:15:07.806ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
54722023-09-22T23:15:07.806ZINFOcrucible: [1] 0a5ceb5e-a244-4fe3-9861-83ccd3099205 (a60075df-ec91-455d-8fe1-c145ba0f2a55) Faulted Active Active ds_transition to Faulted
54732023-09-22T23:15:07.806ZINFOcrucible: [1] Transition from Active to Faulted
5474 test test::up_test::read_after_two_write_fail_is_alright ... ok
54752023-09-22T23:15:07.807ZINFOcrucible: Crucible stats registered with UUID: 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8
54762023-09-22T23:15:07.807ZINFOcrucible: Crucible 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 has session id: 6862594b-a5e0-427f-95ca-728133c7c3c4
54772023-09-22T23:15:07.807ZINFOcrucible: [0] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) New New New ds_transition to WaitActive
54782023-09-22T23:15:07.807ZINFOcrucible: [0] Transition from New to WaitActive
54792023-09-22T23:15:07.807ZINFOcrucible: [0] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) WaitActive New New ds_transition to WaitQuorum
54802023-09-22T23:15:07.807ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
54812023-09-22T23:15:07.807ZINFOcrucible: [0] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) WaitQuorum New New ds_transition to Active
54822023-09-22T23:15:07.807ZINFOcrucible: [0] Transition from WaitQuorum to Active
54832023-09-22T23:15:07.807ZINFOcrucible: [1] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active New New ds_transition to WaitActive
54842023-09-22T23:15:07.807ZINFOcrucible: [1] Transition from New to WaitActive
54852023-09-22T23:15:07.807ZINFOcrucible: [1] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active WaitActive New ds_transition to WaitQuorum
54862023-09-22T23:15:07.807ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
54872023-09-22T23:15:07.807ZINFOcrucible: [1] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active WaitQuorum New ds_transition to Active
54882023-09-22T23:15:07.807ZINFOcrucible: [1] Transition from WaitQuorum to Active
54892023-09-22T23:15:07.807ZINFOcrucible: [2] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active Active New ds_transition to WaitActive
54902023-09-22T23:15:07.807ZINFOcrucible: [2] Transition from New to WaitActive
54912023-09-22T23:15:07.807ZINFOcrucible: [2] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active Active WaitActive ds_transition to WaitQuorum
54922023-09-22T23:15:07.807ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
54932023-09-22T23:15:07.807ZINFOcrucible: [2] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active Active WaitQuorum ds_transition to Active
54942023-09-22T23:15:07.807ZINFOcrucible: [2] Transition from WaitQuorum to Active
54952023-09-22T23:15:07.807ZINFOcrucible: 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 is now active with session: 5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa
54962023-09-22T23:15:07.807ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
54972023-09-22T23:15:07.807ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
54982023-09-22T23:15:07.807ZINFOcrucible: [0] client skip 1 in process jobs because fault = downstairs
54992023-09-22T23:15:07.807ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
55002023-09-22T23:15:07.808ZINFOcrucible: [0] 8bfbf34a-c7f4-4b7f-9fdd-7cec799871e8 (5b5e90bd-53e9-47e8-b26d-ccd08acbdeaa) Active Active Active ds_transition to Faulted
55012023-09-22T23:15:07.808ZINFOcrucible: [0] Transition from Active to Faulted
5502 test test::up_test::read_after_write_fail_is_alright ... ok
55032023-09-22T23:15:07.808ZINFOcrucible: Crucible stats registered with UUID: 0b89c204-7ea7-4b26-8e17-efa6d769eee7
55042023-09-22T23:15:07.808ZINFOcrucible: Crucible 0b89c204-7ea7-4b26-8e17-efa6d769eee7 has session id: 5d48fae0-e5b7-4078-a58c-af8e75c04681
55052023-09-22T23:15:07.808ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55062023-09-22T23:15:07.808ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55072023-09-22T23:15:07.808ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
55082023-09-22T23:15:07.808ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
55092023-09-22T23:15:07.808ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5510 test test::up_test::reconcile_leave_no_job_behind - should panic ... ok
55112023-09-22T23:15:07.809ZINFOcrucible: Crucible stats registered with UUID: bbf965c8-2f32-4092-ad08-341946975de0
55122023-09-22T23:15:07.809ZINFOcrucible: Crucible bbf965c8-2f32-4092-ad08-341946975de0 has session id: e0e46971-08b9-45db-b39e-ed0707a1cef0
55132023-09-22T23:15:07.809ZINFOcrucible: [0] bbf965c8-2f32-4092-ad08-341946975de0 (0be11dd1-6309-4977-a837-3113bf96dbf7) New New New ds_transition to WaitActive
55142023-09-22T23:15:07.809ZINFOcrucible: [0] Transition from New to WaitActive
55152023-09-22T23:15:07.809ZINFOcrucible: [0] bbf965c8-2f32-4092-ad08-341946975de0 (0be11dd1-6309-4977-a837-3113bf96dbf7) WaitActive New New ds_transition to WaitQuorum
55162023-09-22T23:15:07.809ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
55172023-09-22T23:15:07.809ZINFOcrucible: [1] bbf965c8-2f32-4092-ad08-341946975de0 (0be11dd1-6309-4977-a837-3113bf96dbf7) WaitQuorum New New ds_transition to WaitActive
55182023-09-22T23:15:07.809ZINFOcrucible: [1] Transition from New to WaitActive
55192023-09-22T23:15:07.809ZINFOcrucible: [1] bbf965c8-2f32-4092-ad08-341946975de0 (0be11dd1-6309-4977-a837-3113bf96dbf7) WaitQuorum WaitActive New ds_transition to WaitQuorum
55202023-09-22T23:15:07.809ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
55212023-09-22T23:15:07.809ZINFOcrucible: Waiting for 1 more clients to be ready
5522 test test::up_test::reconcile_not_ready ... ok
55232023-09-22T23:15:07.810ZINFOcrucible: Crucible stats registered with UUID: 1096010d-5ed4-4780-9317-bfd7ffb0ff17
55242023-09-22T23:15:07.810ZINFOcrucible: Crucible 1096010d-5ed4-4780-9317-bfd7ffb0ff17 has session id: 6252da48-dccd-4eeb-b3cb-f42ab9e3f698
55252023-09-22T23:15:07.810ZINFOcrucible: Full repair list: {9: ExtentFix { source: ClientId(0), dest: [ClientId(1), ClientId(2)] }} = downstairs
55262023-09-22T23:15:07.810ZINFOcrucible: Task list: [ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 9, client_id: ClientId(0), flush_number: 22, gen_number: 33 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 9 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 9, source_client_id: ClientId(0), source_repair_address: 127.0.0.1:801, dest_clients: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 9 }, state: ClientData([New, New, New]) }] = downstairs
5527 test test::up_test::reconcile_rc_to_message ... ok
55282023-09-22T23:15:07.810ZINFOcrucible: Crucible stats registered with UUID: d30f6c89-428e-48b7-877d-8aa2c23da5a2
55292023-09-22T23:15:07.810ZINFOcrucible: Crucible d30f6c89-428e-48b7-877d-8aa2c23da5a2 has session id: a97256fe-a31d-43d6-a5a4-e09fffabd5dc
55302023-09-22T23:15:07.811ZINFOcrucible: Full repair list: {5: ExtentFix { source: ClientId(2), dest: [ClientId(0), ClientId(1)] }} = downstairs
55312023-09-22T23:15:07.811ZINFOcrucible: Task list: [ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 5, client_id: ClientId(2), flush_number: 66, gen_number: 77 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 5 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 5, source_client_id: ClientId(2), source_repair_address: 127.0.0.1:803, dest_clients: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 5 }, state: ClientData([New, New, New]) }] = downstairs
5532 test test::up_test::reconcile_rc_to_message_two ... ok
55332023-09-22T23:15:07.811ZINFOcrucible: Crucible stats registered with UUID: c065c25d-9b93-4776-b10a-88edf0f4668d
55342023-09-22T23:15:07.811ZINFOcrucible: Crucible c065c25d-9b93-4776-b10a-88edf0f4668d has session id: b9421b5b-fd28-41ba-96dd-46815b9abce9
55352023-09-22T23:15:07.811ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5536 test test::up_test::reconcile_rep_done_too_soon - should panic ... ok
55372023-09-22T23:15:07.812ZINFOcrucible: Crucible stats registered with UUID: 84fa6ec6-e5be-4e13-b154-a89c96163be0
55382023-09-22T23:15:07.812ZINFOcrucible: Crucible 84fa6ec6-e5be-4e13-b154-a89c96163be0 has session id: f7795422-1eb4-4bfa-b4c4-e4754e16a099
55392023-09-22T23:15:07.812ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55402023-09-22T23:15:07.812ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55412023-09-22T23:15:07.812ZINFOcrucible: [0] rep_in_progress ignore submitted job ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
5542 test test::up_test::reconcile_rep_in_progress_bad1 - should panic ... ok
55432023-09-22T23:15:07.813ZINFOcrucible: Crucible stats registered with UUID: 2e39358b-3ac9-4955-ab6e-32a34bdd71d8
55442023-09-22T23:15:07.813ZINFOcrucible: Crucible 2e39358b-3ac9-4955-ab6e-32a34bdd71d8 has session id: 8f21b75e-3ea2-47e8-940b-816bc1d88370
5545 test test::up_test::reconcile_rep_in_progress_none ... ok
55462023-09-22T23:15:07.813ZINFOcrucible: Crucible stats registered with UUID: 1f19f65d-2351-4f27-b314-11785e0566df
55472023-09-22T23:15:07.813ZINFOcrucible: Crucible 1f19f65d-2351-4f27-b314-11785e0566df has session id: 64740696-b9f7-489d-96fa-a766864012b5
55482023-09-22T23:15:07.813ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5549 test test::up_test::reconcile_repair_inprogress_not_done - should panic ... ok
55502023-09-22T23:15:07.814ZINFOcrucible: Crucible stats registered with UUID: 0628d57e-31e0-4662-9521-c7b8bb6fef09
55512023-09-22T23:15:07.814ZINFOcrucible: Crucible 0628d57e-31e0-4662-9521-c7b8bb6fef09 has session id: 7f5cbd16-7b78-44fb-b4d9-571fe0dfbfee
55522023-09-22T23:15:07.814ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55532023-09-22T23:15:07.814ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55542023-09-22T23:15:07.814ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
55552023-09-22T23:15:07.814ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
55562023-09-22T23:15:07.814ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55572023-09-22T23:15:07.814ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55582023-09-22T23:15:07.814ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
55592023-09-22T23:15:07.814ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
5560 test test::up_test::reconcile_repair_workflow_1 ... ok
55612023-09-22T23:15:07.815ZINFOcrucible: Crucible stats registered with UUID: d839dc93-0b6c-4d12-8978-7d3523282044
55622023-09-22T23:15:07.815ZINFOcrucible: Crucible d839dc93-0b6c-4d12-8978-7d3523282044 has session id: baaaad4e-f367-461a-af99-145df3770fe2
55632023-09-22T23:15:07.815ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55642023-09-22T23:15:07.815ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55652023-09-22T23:15:07.815ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, Skipped, InProgress]) } = downstairs
5566 test test::up_test::reconcile_repair_workflow_2 ... ok
55672023-09-22T23:15:07.815ZINFOcrucible: Crucible stats registered with UUID: 342ea606-82b9-4a22-9948-f23af258a77a
55682023-09-22T23:15:07.815ZINFOcrucible: Crucible 342ea606-82b9-4a22-9948-f23af258a77a has session id: 768f12da-4a5e-48b6-9603-65336a434a7a
55692023-09-22T23:15:07.815ZERROcrucible: Mark 0 as FAILED REPAIR = downstairs
55702023-09-22T23:15:07.815ZERROcrucible: Mark 2 as FAILED REPAIR = downstairs
55712023-09-22T23:15:07.815ZINFOcrucible: Clear out existing repair work queue = downstairs
5572 test test::up_test::reconcile_repair_workflow_not_repair ... ok
55732023-09-22T23:15:07.816ZINFOcrucible: Crucible stats registered with UUID: 045bf5a0-f24b-4e5d-8eee-5cd365625bb9
55742023-09-22T23:15:07.816ZINFOcrucible: Crucible 045bf5a0-f24b-4e5d-8eee-5cd365625bb9 has session id: 5deed47f-0e87-489c-b858-8a7052767c4f
55752023-09-22T23:15:07.816ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55762023-09-22T23:15:07.816ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55772023-09-22T23:15:07.816ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
55782023-09-22T23:15:07.816ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
55792023-09-22T23:15:07.816ZERROcrucible: Mark 0 as FAILED REPAIR = downstairs
55802023-09-22T23:15:07.816ZERROcrucible: Mark 2 as FAILED REPAIR = downstairs
55812023-09-22T23:15:07.816ZINFOcrucible: Clear out existing repair work queue = downstairs
5582 test test::up_test::reconcile_repair_workflow_not_repair_later ... ok
55832023-09-22T23:15:07.817ZINFOcrucible: Crucible stats registered with UUID: dbe048ac-a1fe-4b72-a0aa-f525ae7d3cd2
55842023-09-22T23:15:07.817ZINFOcrucible: Crucible dbe048ac-a1fe-4b72-a0aa-f525ae7d3cd2 has session id: 9bacc61f-055d-4854-8801-b6b9cc396b23
55852023-09-22T23:15:07.817ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
55862023-09-22T23:15:07.817ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
55872023-09-22T23:15:07.817ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
55882023-09-22T23:15:07.817ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
5589 test test::up_test::reconcile_repair_workflow_repair_later ... ok
55902023-09-22T23:15:07.817ZINFOcrucible: Crucible stats registered with UUID: ad0ad087-5636-4dbb-9b1f-4e7162aaf473
55912023-09-22T23:15:07.817ZINFOcrucible: Crucible ad0ad087-5636-4dbb-9b1f-4e7162aaf473 has session id: 4a62f8ec-8f60-49a6-971d-73bd97648a14
55922023-09-22T23:15:07.817ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5593 test test::up_test::reconcile_repair_workflow_too_soon - should panic ... ok
55942023-09-22T23:15:07.818ZINFOcrucible: Crucible stats registered with UUID: 9b0d7727-40bf-4713-94eb-685aef15650d
55952023-09-22T23:15:07.818ZINFOcrucible: Crucible 9b0d7727-40bf-4713-94eb-685aef15650d has session id: 9ae2b501-1c6c-427a-8268-fe90a7916390
55962023-09-22T23:15:07.818ZINFOcrucible: 9b0d7727-40bf-4713-94eb-685aef15650d is now active with session: ba963a29-d324-4d03-b68c-c282aea5ecfc
55972023-09-22T23:15:07.818ZWARNcrucible: [rc] leave job 1000 on the queue when removing 1001 WorkCounts { active: 2, error: 0, skipped: 0, done: 1 } = downstairs
5598 test test::up_test::retire_dont_retire_everything ... ok
5599 test test::up_test::send_io_live_repair_read ... ok
5600 test test::up_test::send_io_live_repair_unwritten_write ... ok
5601 test test::up_test::send_io_live_repair_write ... ok
56022023-09-22T23:15:07.820ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56032023-09-22T23:15:07.820ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 400ccb50-91c9-4748-87ad-eed7797a135d
56042023-09-22T23:15:07.820ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 8026c04b-c42e-4254-90e4-dc5293998b77
5605 test test::up_test::test_deps_all_writes_depend_on_flushes ... ok
56062023-09-22T23:15:07.820ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56072023-09-22T23:15:07.820ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 9c4aeccc-a39e-4f93-9224-ab443220b887
56082023-09-22T23:15:07.821ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 4b13766d-1c35-44bd-97a3-3f7dfb293c0c
5609 test test::up_test::test_deps_big_read_depends_on_little_writes ... ok
56102023-09-22T23:15:07.821ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56112023-09-22T23:15:07.821ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 3e9855cf-470f-49ac-918b-e26876852912
56122023-09-22T23:15:07.821ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 49984a05-6b64-4e70-a1f4-90987c78e84f
5613 test test::up_test::test_deps_big_write_depends_on_little_writes ... ok
56142023-09-22T23:15:07.822ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56152023-09-22T23:15:07.822ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: c2a0fb8f-ef49-4f5a-9a05-8860414a6280
56162023-09-22T23:15:07.822ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 2b55c596-21e4-4a7e-a578-388b3a9bcf1e
5617 test test::up_test::test_deps_depend_on_acked_work ... ok
56182023-09-22T23:15:07.823ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56192023-09-22T23:15:07.823ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 55ee8f87-8d1d-4e94-8894-c052b9b692f5
56202023-09-22T23:15:07.823ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 705a17e5-2505-4c68-a7df-f68f5a16a90c
5621 test test::up_test::test_deps_flushes_depend_on_flushes ... ok
56222023-09-22T23:15:07.824ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56232023-09-22T23:15:07.824ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: f56910f3-efd3-4de7-afaf-7d6accd13d87
56242023-09-22T23:15:07.824ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: f92ee978-1a92-44c5-8d7c-6007c8bb7bb5
5625 test test::up_test::test_deps_flushes_depend_on_flushes_and_all_writes ... ok
56262023-09-22T23:15:07.824ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56272023-09-22T23:15:07.824ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: d22fa24e-acd4-4706-aaaf-9ed18ffa17a2
56282023-09-22T23:15:07.825ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 1d0a3a03-deb8-4faa-94ab-faff91b11b40
5629 test test::up_test::test_deps_little_writes_depend_on_big_write ... ok
56302023-09-22T23:15:07.825ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56312023-09-22T23:15:07.825ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 01425d2c-36fb-4ea3-8623-74aa5d0faa5b
56322023-09-22T23:15:07.825ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 8d3d8f32-5bae-45e2-afa3-e7ea4d82fc31
5633 test test::up_test::test_deps_little_writes_depend_on_big_write_chain ... ok
56342023-09-22T23:15:07.826ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56352023-09-22T23:15:07.826ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: ba898aea-2870-4e77-8cda-df85c7b6ddbe
56362023-09-22T23:15:07.826ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: aaa4205d-523a-4668-a5a0-ebe56f00f290
5637 test test::up_test::test_deps_multi_extent_batman ... ok
56382023-09-22T23:15:07.827ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56392023-09-22T23:15:07.827ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 2f03f070-8e48-4ae1-ba83-3570ef59e3ca
56402023-09-22T23:15:07.827ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 32fb8017-0f69-44f5-bd03-e7b3cc5263a6
5641 test test::up_test::test_deps_multi_extent_there_and_back_again ... ok
56422023-09-22T23:15:07.828ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56432023-09-22T23:15:07.828ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 98c73ec0-8fef-495f-a57b-b497944d25eb
56442023-09-22T23:15:07.828ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: cd9b78ef-6883-42e6-9dc8-4df3da57cea5
5645 test test::up_test::test_deps_multi_extent_write ... ok
56462023-09-22T23:15:07.829ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56472023-09-22T23:15:07.829ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: fe3063a7-85d9-4920-925d-fe47b0464524
56482023-09-22T23:15:07.829ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 0a8341e4-2e95-4877-abda-cd13672f2ec2
5649 test test::up_test::test_deps_multiple_reads_depend_on_write ... ok
56502023-09-22T23:15:07.829ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56512023-09-22T23:15:07.829ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 20ea752f-11ff-45e7-9739-3337d54ea0f8
56522023-09-22T23:15:07.829ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 5fd49c0d-4371-4544-ae12-37b2a0440c29
5653 test test::up_test::test_deps_read_depends_on_flush ... ok
56542023-09-22T23:15:07.830ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56552023-09-22T23:15:07.830ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 48714655-f322-46b9-ac70-e785c3994869
56562023-09-22T23:15:07.830ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 925aa39e-6824-4e72-bc51-9a85b7fa8392
5657 test test::up_test::test_deps_read_depends_on_write ... ok
56582023-09-22T23:15:07.831ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56592023-09-22T23:15:07.831ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 88ebc32d-c873-4e0c-84f5-4d960c35a5bd
56602023-09-22T23:15:07.831ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: cbc5cc40-5f88-41bd-b66e-218569c8df9c
5661 test test::up_test::test_deps_read_no_depend_on_read ... ok
56622023-09-22T23:15:07.832ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56632023-09-22T23:15:07.832ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: d9610d11-d69b-4620-ac5f-9562c46c288b
56642023-09-22T23:15:07.832ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: ca97c4e0-1919-4d0b-aeb4-cf397f4dabe7
5665 test test::up_test::test_deps_read_write_batman ... ok
56662023-09-22T23:15:07.832ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56672023-09-22T23:15:07.832ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: f8b0e600-168e-439a-a50e-8754fdcca0fe
56682023-09-22T23:15:07.832ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 8887bd21-6151-4472-9cb4-7a76ea21ade4
5669 test test::up_test::test_deps_read_write_ladder_1 ... ok
56702023-09-22T23:15:07.833ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56712023-09-22T23:15:07.833ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 095a7024-45f2-4a3f-81ae-92c9a766278f
56722023-09-22T23:15:07.833ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 90d57ae5-1f02-4ffa-b6ed-2c94cb4e3e09
5673 test test::up_test::test_deps_read_write_ladder_2 ... ok
56742023-09-22T23:15:07.834ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56752023-09-22T23:15:07.834ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 82a703c3-f9a8-4153-a21b-e85cbb19ede6
56762023-09-22T23:15:07.834ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: f4e63527-5021-4e21-acf9-1e191852240e
5677 test test::up_test::test_deps_read_write_ladder_3 ... ok
56782023-09-22T23:15:07.835ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56792023-09-22T23:15:07.835ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: b2d843e7-e553-4e4d-b4a7-c971a147dae1
56802023-09-22T23:15:07.835ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 9770c79d-b115-4452-b2b4-e6b6abc0fc27
5681 test test::up_test::test_deps_write_unwrittens_depend_on_read ... ok
56822023-09-22T23:15:07.835ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56832023-09-22T23:15:07.835ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: fd3652d6-b8c8-4cc1-a493-9287e468bbd9
56842023-09-22T23:15:07.836ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 8518cf9a-fb55-4c27-bab2-dc9059d42c26
5685 test test::up_test::test_deps_writes_depend_on_overlapping_writes ... ok
56862023-09-22T23:15:07.836ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56872023-09-22T23:15:07.836ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 13d29be0-8f00-4a8e-9ba9-d45fd3756542
56882023-09-22T23:15:07.836ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: ee73f036-d026-4e56-890e-7f7380d872e4
5689 test test::up_test::test_deps_writes_depend_on_overlapping_writes_and_flushes ... ok
56902023-09-22T23:15:07.837ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56912023-09-22T23:15:07.837ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: a2ff5a1f-78b8-4ea3-ac4d-ec560dad768f
56922023-09-22T23:15:07.837ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 1d090668-176a-496c-9a94-b5de202b9d61
5693 test test::up_test::test_deps_writes_depend_on_overlapping_writes_chain ... ok
56942023-09-22T23:15:07.838ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
56952023-09-22T23:15:07.838ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: ce8c253a-d0c3-40af-aaa3-8598e5065cd3
56962023-09-22T23:15:07.838ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: cb90bcd6-2d66-4b22-8d0b-a666140843f0
5697 test test::up_test::test_deps_writes_depend_on_read ... ok
5698 test test::up_test::test_flush_does_not_consume_bw ... ok
5699 test test::up_test::test_flush_does_not_consume_iops ... ok
57002023-09-22T23:15:07.851ZINFOcrucible: Now move the NoOp job forward
57012023-09-22T23:15:07.851ZINFOcrucible: Now ACK the NoOp job
57022023-09-22T23:15:07.851ZINFOcrucible: Finally, move the ReOpen job forward
57032023-09-22T23:15:07.851ZINFOcrucible: Now ACK the Reopen job
57042023-09-22T23:15:07.851ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
57052023-09-22T23:15:07.851ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
57062023-09-22T23:15:07.851ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
57072023-09-22T23:15:07.851ZWARNcrucible: RE:0 Bailing with error
57082023-09-22T23:15:07.852ZINFOcrucible: Crucible stats registered with UUID: 36f6e1bc-5305-47e3-91bd-fe997b9223d6
57092023-09-22T23:15:07.852ZINFOcrucible: Crucible 36f6e1bc-5305-47e3-91bd-fe997b9223d6 has session id: 3c815966-8e5b-497b-b01e-7b8c2cf11054
57102023-09-22T23:15:07.852ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) New New New ds_transition to WaitActive
57112023-09-22T23:15:07.852ZINFOcrucible: [0] Transition from New to WaitActive
57122023-09-22T23:15:07.852ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) WaitActive New New ds_transition to WaitQuorum
57132023-09-22T23:15:07.852ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
57142023-09-22T23:15:07.852ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) WaitQuorum New New ds_transition to Active
57152023-09-22T23:15:07.852ZINFOcrucible: [0] Transition from WaitQuorum to Active
57162023-09-22T23:15:07.852ZINFOcrucible: [1] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active New New ds_transition to WaitActive
57172023-09-22T23:15:07.852ZINFOcrucible: [1] Transition from New to WaitActive
57182023-09-22T23:15:07.852ZINFOcrucible: [1] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active WaitActive New ds_transition to WaitQuorum
57192023-09-22T23:15:07.852ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
57202023-09-22T23:15:07.852ZINFOcrucible: [1] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active WaitQuorum New ds_transition to Active
57212023-09-22T23:15:07.852ZINFOcrucible: [1] Transition from WaitQuorum to Active
57222023-09-22T23:15:07.852ZINFOcrucible: [2] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active Active New ds_transition to WaitActive
57232023-09-22T23:15:07.852ZINFOcrucible: [2] Transition from New to WaitActive
57242023-09-22T23:15:07.852ZINFOcrucible: [2] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active Active WaitActive ds_transition to WaitQuorum
57252023-09-22T23:15:07.852ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
57262023-09-22T23:15:07.852ZINFOcrucible: [2] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active Active WaitQuorum ds_transition to Active
57272023-09-22T23:15:07.852ZINFOcrucible: [2] Transition from WaitQuorum to Active
57282023-09-22T23:15:07.852ZINFOcrucible: 36f6e1bc-5305-47e3-91bd-fe997b9223d6 is now active with session: 98b45c36-052b-4d2c-8209-f6db7df5fa0e
57292023-09-22T23:15:07.852ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Active Active Active ds_transition to Faulted
57302023-09-22T23:15:07.852ZINFOcrucible: [0] Transition from Active to Faulted
57312023-09-22T23:15:07.852ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) Faulted Active Active ds_transition to LiveRepairReady
57322023-09-22T23:15:07.852ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
57332023-09-22T23:15:07.852ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) LiveRepairReady Active Active ds_transition to LiveRepair
57342023-09-22T23:15:07.853ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
57352023-09-22T23:15:07.853ZINFOcrucible: Waiting for Close + ReOpen jobs
57362023-09-22T23:15:07.853ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
57372023-09-22T23:15:07.853ZINFOcrucible: RE:0 close id:1000 queued, notify DS
57382023-09-22T23:15:07.853ZINFOcrucible: RE:0 Wait for result from close command 1000:1
5739 test test::up_test::test_impossible_io ... ok
5740 test test::up_test::test_iop_and_bw_limit ... ok
5741 test test::up_test::test_iospan ... ok
5742 test test::up_test::test_iospan_buffer_read_write ... ok
5743 test test::up_test::test_no_iop_limit ... ok
57442023-09-22T23:15:07.889ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
57452023-09-22T23:15:07.889ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 35fe92bd-7d43-46d8-8864-a73306208500
57462023-09-22T23:15:07.889ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: ae6fec1e-970f-40b4-a78f-9ce644a4f5fa
5747 test test::up_test::test_read_flush_write_hash_mismatch ... ok
5748 test test::up_test::test_set_bw_limit ... ok
5749 test test::up_test::test_set_iop_limit ... ok
5750 test test::up_test::test_upstairs_encryption_context_ok ... ok
5751 test test::up_test::test_upstairs_encryption_context_wrong_nonce ... ok
5752 test test::up_test::test_upstairs_encryption_context_wrong_tag ... ok
5753 test test::up_test::test_upstairs_validate_encrypted_read_response ... ok
5754 test test::up_test::test_upstairs_validate_encrypted_read_response_blank_block ... ok
5755 test test::up_test::test_upstairs_validate_encrypted_read_response_multiple_contexts ... ok
5756 test test::up_test::test_upstairs_validate_unencrypted_read_response ... ok
5757 test test::up_test::test_upstairs_validate_unencrypted_read_response_blank_block ... ok
5758 test test::up_test::test_upstairs_validate_unencrypted_read_response_multiple_contexts ... ok
5759 test test::up_test::test_upstairs_validate_unencrypted_read_response_multiple_hashes ... ok
57602023-09-22T23:15:07.896ZINFOcrucible: Crucible stats registered with UUID: ed279642-cd12-4875-9d22-1d8bffc528d0
57612023-09-22T23:15:07.896ZINFOcrucible: Crucible ed279642-cd12-4875-9d22-1d8bffc528d0 has session id: 9c84cecd-63ea-4c2d-97a9-cb6429ea0ae1
57622023-09-22T23:15:07.896ZINFOcrucible: [0] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) New New New ds_transition to WaitActive
57632023-09-22T23:15:07.896ZINFOcrucible: [0] Transition from New to WaitActive
57642023-09-22T23:15:07.896ZINFOcrucible: [0] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) WaitActive New New ds_transition to WaitQuorum
57652023-09-22T23:15:07.896ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
57662023-09-22T23:15:07.896ZINFOcrucible: [0] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) WaitQuorum New New ds_transition to Active
57672023-09-22T23:15:07.896ZINFOcrucible: [0] Transition from WaitQuorum to Active
57682023-09-22T23:15:07.896ZINFOcrucible: [1] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active New New ds_transition to WaitActive
57692023-09-22T23:15:07.896ZINFOcrucible: [1] Transition from New to WaitActive
57702023-09-22T23:15:07.896ZINFOcrucible: [1] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active WaitActive New ds_transition to WaitQuorum
57712023-09-22T23:15:07.896ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
57722023-09-22T23:15:07.896ZINFOcrucible: [1] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active WaitQuorum New ds_transition to Active
57732023-09-22T23:15:07.897ZINFOcrucible: [1] Transition from WaitQuorum to Active
57742023-09-22T23:15:07.897ZINFOcrucible: [2] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active Active New ds_transition to WaitActive
57752023-09-22T23:15:07.897ZINFOcrucible: [2] Transition from New to WaitActive
57762023-09-22T23:15:07.897ZINFOcrucible: [2] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active Active WaitActive ds_transition to WaitQuorum
57772023-09-22T23:15:07.897ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
57782023-09-22T23:15:07.897ZINFOcrucible: [2] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active Active WaitQuorum ds_transition to Active
57792023-09-22T23:15:07.897ZINFOcrucible: [2] Transition from WaitQuorum to Active
57802023-09-22T23:15:07.897ZINFOcrucible: ed279642-cd12-4875-9d22-1d8bffc528d0 is now active with session: 459b81d2-7ba4-4d09-bbf5-23f5147c2925
57812023-09-22T23:15:07.897ZINFOcrucible: [0] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Active Active Active ds_transition to Faulted
57822023-09-22T23:15:07.897ZINFOcrucible: [0] Transition from Active to Faulted
57832023-09-22T23:15:07.897ZINFOcrucible: [1] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Faulted Active Active ds_transition to Faulted
57842023-09-22T23:15:07.897ZINFOcrucible: [1] Transition from Active to Faulted
57852023-09-22T23:15:07.897ZINFOcrucible: [2] ed279642-cd12-4875-9d22-1d8bffc528d0 (459b81d2-7ba4-4d09-bbf5-23f5147c2925) Faulted Faulted Active ds_transition to Faulted
57862023-09-22T23:15:07.897ZINFOcrucible: [2] Transition from Active to Faulted
57872023-09-22T23:15:07.897ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
57882023-09-22T23:15:07.897ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
5789 test test::up_test::three_faulted_enqueue_will_handle_flush ... ok
57902023-09-22T23:15:07.897ZINFOcrucible: Crucible stats registered with UUID: 15d8cd85-3d92-4a9e-b443-604c8bfa1f28
57912023-09-22T23:15:07.897ZINFOcrucible: Crucible 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 has session id: dfa7edc9-137c-4e89-8b6a-42c071f07e00
57922023-09-22T23:15:07.897ZINFOcrucible: [0] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) New New New ds_transition to WaitActive
57932023-09-22T23:15:07.897ZINFOcrucible: [0] Transition from New to WaitActive
57942023-09-22T23:15:07.897ZINFOcrucible: [0] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) WaitActive New New ds_transition to WaitQuorum
57952023-09-22T23:15:07.897ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
57962023-09-22T23:15:07.898ZINFOcrucible: [0] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) WaitQuorum New New ds_transition to Active
57972023-09-22T23:15:07.898ZINFOcrucible: [0] Transition from WaitQuorum to Active
57982023-09-22T23:15:07.898ZINFOcrucible: [1] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active New New ds_transition to WaitActive
57992023-09-22T23:15:07.898ZINFOcrucible: [1] Transition from New to WaitActive
58002023-09-22T23:15:07.898ZINFOcrucible: [1] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active WaitActive New ds_transition to WaitQuorum
58012023-09-22T23:15:07.898ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
58022023-09-22T23:15:07.898ZINFOcrucible: [1] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active WaitQuorum New ds_transition to Active
58032023-09-22T23:15:07.898ZINFOcrucible: [1] Transition from WaitQuorum to Active
58042023-09-22T23:15:07.898ZINFOcrucible: [2] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active Active New ds_transition to WaitActive
58052023-09-22T23:15:07.898ZINFOcrucible: [2] Transition from New to WaitActive
58062023-09-22T23:15:07.898ZINFOcrucible: [2] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active Active WaitActive ds_transition to WaitQuorum
58072023-09-22T23:15:07.898ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
58082023-09-22T23:15:07.898ZINFOcrucible: [2] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active Active WaitQuorum ds_transition to Active
58092023-09-22T23:15:07.898ZINFOcrucible: [2] Transition from WaitQuorum to Active
58102023-09-22T23:15:07.898ZINFOcrucible: 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 is now active with session: ab77762c-1887-4181-bbb2-afc7bf0d173c
58112023-09-22T23:15:07.898ZINFOcrucible: [0] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Active Active Active ds_transition to Faulted
58122023-09-22T23:15:07.898ZINFOcrucible: [0] Transition from Active to Faulted
58132023-09-22T23:15:07.898ZINFOcrucible: [1] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Faulted Active Active ds_transition to Faulted
58142023-09-22T23:15:07.898ZINFOcrucible: [1] Transition from Active to Faulted
58152023-09-22T23:15:07.898ZINFOcrucible: [2] 15d8cd85-3d92-4a9e-b443-604c8bfa1f28 (ab77762c-1887-4181-bbb2-afc7bf0d173c) Faulted Faulted Active ds_transition to Faulted
58162023-09-22T23:15:07.898ZINFOcrucible: [2] Transition from Active to Faulted
58172023-09-22T23:15:07.898ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
58182023-09-22T23:15:07.898ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
58192023-09-22T23:15:07.898ZWARNcrucible: job 1001 skipped on all downstairs = downstairs
58202023-09-22T23:15:07.898ZINFOcrucible: Enqueue job 1001 goes straight to AckReady = downstairs
58212023-09-22T23:15:07.898ZWARNcrucible: job 1002 skipped on all downstairs = downstairs
58222023-09-22T23:15:07.898ZINFOcrucible: Enqueue job 1002 goes straight to AckReady = downstairs
5823 test test::up_test::three_faulted_enqueue_will_handle_many_ios ... ok
58242023-09-22T23:15:07.899ZINFOcrucible: Crucible stats registered with UUID: 34ba4809-324b-4e27-ac13-29098c033c38
58252023-09-22T23:15:07.899ZINFOcrucible: Crucible 34ba4809-324b-4e27-ac13-29098c033c38 has session id: 922d968f-9aae-41ed-a2f1-fcb4c980fc40
58262023-09-22T23:15:07.899ZINFOcrucible: [0] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) New New New ds_transition to WaitActive
58272023-09-22T23:15:07.899ZINFOcrucible: [0] Transition from New to WaitActive
58282023-09-22T23:15:07.899ZINFOcrucible: [0] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) WaitActive New New ds_transition to WaitQuorum
58292023-09-22T23:15:07.899ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
58302023-09-22T23:15:07.899ZINFOcrucible: [0] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) WaitQuorum New New ds_transition to Active
58312023-09-22T23:15:07.899ZINFOcrucible: [0] Transition from WaitQuorum to Active
58322023-09-22T23:15:07.899ZINFOcrucible: [1] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active New New ds_transition to WaitActive
58332023-09-22T23:15:07.899ZINFOcrucible: [1] Transition from New to WaitActive
58342023-09-22T23:15:07.899ZINFOcrucible: [1] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active WaitActive New ds_transition to WaitQuorum
58352023-09-22T23:15:07.899ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
58362023-09-22T23:15:07.899ZINFOcrucible: [1] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active WaitQuorum New ds_transition to Active
58372023-09-22T23:15:07.899ZINFOcrucible: [1] Transition from WaitQuorum to Active
58382023-09-22T23:15:07.899ZINFOcrucible: [2] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active Active New ds_transition to WaitActive
58392023-09-22T23:15:07.899ZINFOcrucible: [2] Transition from New to WaitActive
58402023-09-22T23:15:07.899ZINFOcrucible: [2] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active Active WaitActive ds_transition to WaitQuorum
58412023-09-22T23:15:07.899ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
58422023-09-22T23:15:07.899ZINFOcrucible: [2] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active Active WaitQuorum ds_transition to Active
58432023-09-22T23:15:07.899ZINFOcrucible: [2] Transition from WaitQuorum to Active
58442023-09-22T23:15:07.899ZINFOcrucible: 34ba4809-324b-4e27-ac13-29098c033c38 is now active with session: 75ce574e-2900-4bdd-93ad-a65d388a02a5
58452023-09-22T23:15:07.899ZINFOcrucible: [0] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Active Active Active ds_transition to Faulted
58462023-09-22T23:15:07.899ZINFOcrucible: [0] Transition from Active to Faulted
58472023-09-22T23:15:07.899ZINFOcrucible: [1] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Faulted Active Active ds_transition to Faulted
58482023-09-22T23:15:07.899ZINFOcrucible: [1] Transition from Active to Faulted
58492023-09-22T23:15:07.899ZINFOcrucible: [2] 34ba4809-324b-4e27-ac13-29098c033c38 (75ce574e-2900-4bdd-93ad-a65d388a02a5) Faulted Faulted Active ds_transition to Faulted
58502023-09-22T23:15:07.899ZINFOcrucible: [2] Transition from Active to Faulted
58512023-09-22T23:15:07.899ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
58522023-09-22T23:15:07.899ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
5853 test test::up_test::three_faulted_enqueue_will_handle_read ... ok
58542023-09-22T23:15:07.900ZINFOcrucible: Crucible stats registered with UUID: 268633af-f0dd-45fe-b45e-485802948fda
58552023-09-22T23:15:07.900ZINFOcrucible: Crucible 268633af-f0dd-45fe-b45e-485802948fda has session id: 1362ed8c-38f1-40a4-bc28-a3774613e8f0
58562023-09-22T23:15:07.900ZINFOcrucible: [0] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) New New New ds_transition to WaitActive
58572023-09-22T23:15:07.900ZINFOcrucible: [0] Transition from New to WaitActive
58582023-09-22T23:15:07.900ZINFOcrucible: [0] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) WaitActive New New ds_transition to WaitQuorum
58592023-09-22T23:15:07.900ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
58602023-09-22T23:15:07.900ZINFOcrucible: [0] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) WaitQuorum New New ds_transition to Active
58612023-09-22T23:15:07.900ZINFOcrucible: [0] Transition from WaitQuorum to Active
58622023-09-22T23:15:07.900ZINFOcrucible: [1] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active New New ds_transition to WaitActive
58632023-09-22T23:15:07.900ZINFOcrucible: [1] Transition from New to WaitActive
58642023-09-22T23:15:07.900ZINFOcrucible: [1] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active WaitActive New ds_transition to WaitQuorum
58652023-09-22T23:15:07.900ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
58662023-09-22T23:15:07.900ZINFOcrucible: [1] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active WaitQuorum New ds_transition to Active
58672023-09-22T23:15:07.900ZINFOcrucible: [1] Transition from WaitQuorum to Active
58682023-09-22T23:15:07.900ZINFOcrucible: [2] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active Active New ds_transition to WaitActive
58692023-09-22T23:15:07.900ZINFOcrucible: [2] Transition from New to WaitActive
58702023-09-22T23:15:07.900ZINFOcrucible: [2] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active Active WaitActive ds_transition to WaitQuorum
58712023-09-22T23:15:07.900ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
58722023-09-22T23:15:07.900ZINFOcrucible: [2] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active Active WaitQuorum ds_transition to Active
58732023-09-22T23:15:07.900ZINFOcrucible: [2] Transition from WaitQuorum to Active
58742023-09-22T23:15:07.900ZINFOcrucible: 268633af-f0dd-45fe-b45e-485802948fda is now active with session: 1db398b3-624d-4884-ad1a-935c885a062f
58752023-09-22T23:15:07.900ZINFOcrucible: [0] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Active Active Active ds_transition to Faulted
58762023-09-22T23:15:07.900ZINFOcrucible: [0] Transition from Active to Faulted
58772023-09-22T23:15:07.900ZINFOcrucible: [1] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Faulted Active Active ds_transition to Faulted
58782023-09-22T23:15:07.900ZINFOcrucible: [1] Transition from Active to Faulted
58792023-09-22T23:15:07.900ZINFOcrucible: [2] 268633af-f0dd-45fe-b45e-485802948fda (1db398b3-624d-4884-ad1a-935c885a062f) Faulted Faulted Active ds_transition to Faulted
58802023-09-22T23:15:07.900ZINFOcrucible: [2] Transition from Active to Faulted
58812023-09-22T23:15:07.900ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
58822023-09-22T23:15:07.900ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
5883 test test::up_test::three_faulted_enqueue_will_handle_write ... ok
58842023-09-22T23:15:07.901ZINFOcrucible: Crucible stats registered with UUID: 8f5eb220-4b2e-462b-b7aa-05d0d162c535
58852023-09-22T23:15:07.901ZINFOcrucible: Crucible 8f5eb220-4b2e-462b-b7aa-05d0d162c535 has session id: 51106ee6-4468-4eab-a351-5ead15d0d68a
58862023-09-22T23:15:07.901ZINFOcrucible: [0] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) New New New ds_transition to WaitActive
58872023-09-22T23:15:07.901ZINFOcrucible: [0] Transition from New to WaitActive
58882023-09-22T23:15:07.901ZINFOcrucible: [0] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) WaitActive New New ds_transition to WaitQuorum
58892023-09-22T23:15:07.901ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
58902023-09-22T23:15:07.901ZINFOcrucible: [0] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) WaitQuorum New New ds_transition to Active
58912023-09-22T23:15:07.901ZINFOcrucible: [0] Transition from WaitQuorum to Active
58922023-09-22T23:15:07.901ZINFOcrucible: [1] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active New New ds_transition to WaitActive
58932023-09-22T23:15:07.901ZINFOcrucible: [1] Transition from New to WaitActive
58942023-09-22T23:15:07.901ZINFOcrucible: [1] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active WaitActive New ds_transition to WaitQuorum
58952023-09-22T23:15:07.901ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
58962023-09-22T23:15:07.901ZINFOcrucible: [1] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active WaitQuorum New ds_transition to Active
58972023-09-22T23:15:07.901ZINFOcrucible: [1] Transition from WaitQuorum to Active
58982023-09-22T23:15:07.901ZINFOcrucible: [2] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active Active New ds_transition to WaitActive
58992023-09-22T23:15:07.901ZINFOcrucible: [2] Transition from New to WaitActive
59002023-09-22T23:15:07.901ZINFOcrucible: [2] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active Active WaitActive ds_transition to WaitQuorum
59012023-09-22T23:15:07.901ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
59022023-09-22T23:15:07.901ZINFOcrucible: [2] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active Active WaitQuorum ds_transition to Active
59032023-09-22T23:15:07.901ZINFOcrucible: [2] Transition from WaitQuorum to Active
59042023-09-22T23:15:07.901ZINFOcrucible: 8f5eb220-4b2e-462b-b7aa-05d0d162c535 is now active with session: e1047f59-8476-4547-aa31-314151aa90eb
59052023-09-22T23:15:07.901ZINFOcrucible: [0] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Active Active Active ds_transition to Faulted
59062023-09-22T23:15:07.901ZINFOcrucible: [0] Transition from Active to Faulted
59072023-09-22T23:15:07.901ZINFOcrucible: [1] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Faulted Active Active ds_transition to Faulted
59082023-09-22T23:15:07.901ZINFOcrucible: [1] Transition from Active to Faulted
59092023-09-22T23:15:07.901ZINFOcrucible: [2] 8f5eb220-4b2e-462b-b7aa-05d0d162c535 (e1047f59-8476-4547-aa31-314151aa90eb) Faulted Faulted Active ds_transition to Faulted
59102023-09-22T23:15:07.901ZINFOcrucible: [2] Transition from Active to Faulted
59112023-09-22T23:15:07.901ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
59122023-09-22T23:15:07.901ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
59132023-09-22T23:15:07.901ZWARNcrucible: job 1001 skipped on all downstairs = downstairs
59142023-09-22T23:15:07.902ZINFOcrucible: Enqueue job 1001 goes straight to AckReady = downstairs
59152023-09-22T23:15:07.902ZWARNcrucible: job 1002 skipped on all downstairs = downstairs
59162023-09-22T23:15:07.902ZINFOcrucible: Enqueue job 1002 goes straight to AckReady = downstairs
59172023-09-22T23:15:07.902ZWARNcrucible: job 1003 skipped on all downstairs = downstairs
59182023-09-22T23:15:07.902ZINFOcrucible: Enqueue job 1003 goes straight to AckReady = downstairs
59192023-09-22T23:15:07.902ZWARNcrucible: job 1004 skipped on all downstairs = downstairs
59202023-09-22T23:15:07.902ZINFOcrucible: Enqueue job 1004 goes straight to AckReady = downstairs
59212023-09-22T23:15:07.902ZWARNcrucible: job 1005 skipped on all downstairs = downstairs
59222023-09-22T23:15:07.902ZINFOcrucible: Enqueue job 1005 goes straight to AckReady = downstairs
5923 test test::up_test::three_faulted_retire_skipped_some_leave_some ... ok
59242023-09-22T23:15:07.902ZINFOcrucible: Crucible stats registered with UUID: 2a452f49-e65c-46b0-b3af-bedeb41456b1
59252023-09-22T23:15:07.902ZINFOcrucible: Crucible 2a452f49-e65c-46b0-b3af-bedeb41456b1 has session id: fe4be24f-4caf-4e63-8e2a-72f42b0f7838
59262023-09-22T23:15:07.902ZINFOcrucible: [0] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) New New New ds_transition to WaitActive
59272023-09-22T23:15:07.902ZINFOcrucible: [0] Transition from New to WaitActive
59282023-09-22T23:15:07.902ZINFOcrucible: [0] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) WaitActive New New ds_transition to WaitQuorum
59292023-09-22T23:15:07.902ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
59302023-09-22T23:15:07.902ZINFOcrucible: [0] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) WaitQuorum New New ds_transition to Active
59312023-09-22T23:15:07.902ZINFOcrucible: [0] Transition from WaitQuorum to Active
59322023-09-22T23:15:07.902ZINFOcrucible: [1] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active New New ds_transition to WaitActive
59332023-09-22T23:15:07.902ZINFOcrucible: [1] Transition from New to WaitActive
59342023-09-22T23:15:07.902ZINFOcrucible: [1] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active WaitActive New ds_transition to WaitQuorum
59352023-09-22T23:15:07.902ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
59362023-09-22T23:15:07.902ZINFOcrucible: [1] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active WaitQuorum New ds_transition to Active
59372023-09-22T23:15:07.903ZINFOcrucible: [1] Transition from WaitQuorum to Active
59382023-09-22T23:15:07.903ZINFOcrucible: [2] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active Active New ds_transition to WaitActive
59392023-09-22T23:15:07.903ZINFOcrucible: [2] Transition from New to WaitActive
59402023-09-22T23:15:07.903ZINFOcrucible: [2] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active Active WaitActive ds_transition to WaitQuorum
59412023-09-22T23:15:07.903ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
59422023-09-22T23:15:07.903ZINFOcrucible: [2] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active Active WaitQuorum ds_transition to Active
59432023-09-22T23:15:07.903ZINFOcrucible: [2] Transition from WaitQuorum to Active
59442023-09-22T23:15:07.903ZINFOcrucible: 2a452f49-e65c-46b0-b3af-bedeb41456b1 is now active with session: 6814b1ec-b4d9-4518-b268-4bd533f7ddf5
59452023-09-22T23:15:07.903ZINFOcrucible: [0] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Active Active Active ds_transition to Faulted
59462023-09-22T23:15:07.903ZINFOcrucible: [0] Transition from Active to Faulted
59472023-09-22T23:15:07.903ZINFOcrucible: [2] 2a452f49-e65c-46b0-b3af-bedeb41456b1 (6814b1ec-b4d9-4518-b268-4bd533f7ddf5) Faulted Active Active ds_transition to Faulted
59482023-09-22T23:15:07.903ZINFOcrucible: [2] Transition from Active to Faulted
5949 test test::up_test::two_faulted_downstairs_can_still_read ... ok
59502023-09-22T23:15:07.903ZINFOcrucible: Crucible stats registered with UUID: 61feadda-093e-49e4-bc24-87950912bfbf
59512023-09-22T23:15:07.903ZINFOcrucible: Crucible 61feadda-093e-49e4-bc24-87950912bfbf has session id: db43a40f-88e5-4680-a866-17e54c8d7a1a
59522023-09-22T23:15:07.903ZINFOcrucible: 61feadda-093e-49e4-bc24-87950912bfbf is now active with session: 8af8e09a-f3b4-4318-9cb1-8376464d7b51
59532023-09-22T23:15:07.903ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59542023-09-22T23:15:07.903ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59552023-09-22T23:15:07.903ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59562023-09-22T23:15:07.903ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59572023-09-22T23:15:07.904ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59582023-09-22T23:15:07.904ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59592023-09-22T23:15:07.904ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59602023-09-22T23:15:07.904ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59612023-09-22T23:15:07.904ZERROcrucible: [0] 1001 read error GenericError("bad") DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59622023-09-22T23:15:07.904ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59632023-09-22T23:15:07.904ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59642023-09-22T23:15:07.904ZERROcrucible: [1] 1001 read error GenericError("bad") DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5965 test test::up_test::work_assert_reads_do_not_cause_failure_state_transition ... ok
59662023-09-22T23:15:07.904ZINFOcrucible: Crucible stats registered with UUID: 7804a25f-01f0-40c3-9028-99afe38211f9
59672023-09-22T23:15:07.904ZINFOcrucible: Crucible 7804a25f-01f0-40c3-9028-99afe38211f9 has session id: 05d76864-f028-4f16-8fbc-bff6ad3814ee
59682023-09-22T23:15:07.904ZINFOcrucible: 7804a25f-01f0-40c3-9028-99afe38211f9 is now active with session: 07f4e32b-00a1-4501-99f9-bd3437724390
59692023-09-22T23:15:07.904ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
5970 test test::up_test::work_completed_ack_read_replay ... ok
59712023-09-22T23:15:07.905ZINFOcrucible: Crucible stats registered with UUID: 9ad61097-47b6-432f-a974-ccae159da0e2
59722023-09-22T23:15:07.905ZINFOcrucible: Crucible 9ad61097-47b6-432f-a974-ccae159da0e2 has session id: 297e80ce-ad14-41b1-a6db-e3518973f0c8
59732023-09-22T23:15:07.905ZINFOcrucible: 9ad61097-47b6-432f-a974-ccae159da0e2 is now active with session: b0320f7d-c076-4c69-b0eb-c6ddfca67a83
59742023-09-22T23:15:07.905ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
59752023-09-22T23:15:07.905ZINFOcrucible: REPLAY [0] read hash mismatch on id 1000 Expected [Some(48a5a7677a8e488)] Computed [Some(ea9ca750a094f609)] guest_id:10 request:[ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] job state:ClientData([Done, InProgress, InProgress]) = downstairs
5976 test test::up_test::work_completed_ack_read_replay_hash_mismatch ... ok
59772023-09-22T23:15:07.906ZINFOcrucible: Crucible stats registered with UUID: 5eee8679-0c3f-4c2d-becc-3a09b2ede674
59782023-09-22T23:15:07.906ZINFOcrucible: Crucible 5eee8679-0c3f-4c2d-becc-3a09b2ede674 has session id: aa045a8d-8fde-421b-9172-8ade46fd0ace
59792023-09-22T23:15:07.906ZINFOcrucible: 5eee8679-0c3f-4c2d-becc-3a09b2ede674 is now active with session: 2e9cce9b-670d-4fa1-b2a8-0670887c5706
59802023-09-22T23:15:07.906ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
59812023-09-22T23:15:07.906ZINFOcrucible: REPLAY [1] read hash mismatch on id 1000 Expected [Some(48a5a7677a8e488)] Computed [Some(ea9ca750a094f609)] guest_id:10 request:[ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] job state:ClientData([Done, Done, InProgress]) = downstairs
5982 test test::up_test::work_completed_ack_read_replay_two_hash_mismatch ... ok
59832023-09-22T23:15:07.906ZINFOcrucible: Crucible stats registered with UUID: 03b74852-8a92-452e-8617-c021f4ab681e
59842023-09-22T23:15:07.906ZINFOcrucible: Crucible 03b74852-8a92-452e-8617-c021f4ab681e has session id: 9f284c25-c6f4-404d-b17a-f45300a6eb68
59852023-09-22T23:15:07.906ZINFOcrucible: 03b74852-8a92-452e-8617-c021f4ab681e is now active with session: 9f82f4d3-ee5a-443b-b078-c0f4e8a6e60e
5986 test test::up_test::work_completed_read_flush ... ok
59872023-09-22T23:15:07.907ZINFOcrucible: Crucible stats registered with UUID: ee419862-b955-4190-a38c-f50331776f3d
59882023-09-22T23:15:07.907ZINFOcrucible: Crucible ee419862-b955-4190-a38c-f50331776f3d has session id: db5b7fdd-ad5f-462b-bbde-9c163d7c9303
59892023-09-22T23:15:07.907ZINFOcrucible: ee419862-b955-4190-a38c-f50331776f3d is now active with session: bd7e360e-589b-4752-bb9c-dda6dadd245f
59902023-09-22T23:15:07.907ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
59912023-09-22T23:15:07.907ZINFOcrucible: Remove read data for 1000 = downstairs
5992 test test::up_test::work_completed_read_replay ... ok
59932023-09-22T23:15:07.908ZINFOcrucible: Crucible stats registered with UUID: 723d9772-6b27-4b16-b803-6de21de59757
59942023-09-22T23:15:07.908ZINFOcrucible: Crucible 723d9772-6b27-4b16-b803-6de21de59757 has session id: e078dfe2-0d9d-46f6-880b-4ee025fd2587
59952023-09-22T23:15:07.908ZINFOcrucible: 723d9772-6b27-4b16-b803-6de21de59757 is now active with session: c1a7b7b8-7525-464b-a8b0-4412a893fc00
59962023-09-22T23:15:07.908ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
59972023-09-22T23:15:07.908ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
59982023-09-22T23:15:07.908ZINFOcrucible: Remove read data for 1000 = downstairs
5999 test test::up_test::work_completed_two_read_replay ... ok
60002023-09-22T23:15:07.908ZINFOcrucible: Crucible stats registered with UUID: cb94f5c2-5898-4e01-9edb-5b1abb303464
60012023-09-22T23:15:07.908ZINFOcrucible: Crucible cb94f5c2-5898-4e01-9edb-5b1abb303464 has session id: 109b7061-9714-4123-a6c5-b268ea9811c2
60022023-09-22T23:15:07.908ZINFOcrucible: cb94f5c2-5898-4e01-9edb-5b1abb303464 is now active with session: aee0782f-7549-40ce-a2b8-1ccc3d29aa4c
60032023-09-22T23:15:07.908ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
6004 test test::up_test::work_completed_write_ack_ready_replay_write ... ok
60052023-09-22T23:15:07.909ZINFOcrucible: Crucible stats registered with UUID: 0ca83e76-c347-4ce0-9c12-b33ac8c068fe
60062023-09-22T23:15:07.909ZINFOcrucible: Crucible 0ca83e76-c347-4ce0-9c12-b33ac8c068fe has session id: 4c0e6344-5857-47da-b404-54c2813d4a87
60072023-09-22T23:15:07.909ZINFOcrucible: 0ca83e76-c347-4ce0-9c12-b33ac8c068fe is now active with session: d1e1abc7-fafe-4409-bbed-33fb6e35632d
60082023-09-22T23:15:07.909ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
60092023-09-22T23:15:07.909ZINFOcrucible: Remove AckReady for Wu/F 1000 = downstairs
6010 test test::up_test::work_completed_write_ack_ready_replay_write_unwritten ... ok
60112023-09-22T23:15:07.910ZINFOcrucible: Crucible stats registered with UUID: a3c60a6e-7a2b-4b66-a59c-c18eb00c3ea2
60122023-09-22T23:15:07.910ZINFOcrucible: Crucible a3c60a6e-7a2b-4b66-a59c-c18eb00c3ea2 has session id: fb2b4b2c-4dd7-43c9-acec-06cb8d080fdc
60132023-09-22T23:15:07.910ZINFOcrucible: a3c60a6e-7a2b-4b66-a59c-c18eb00c3ea2 is now active with session: c27c3047-c34d-42b1-bb92-6b9714feb8f1
60142023-09-22T23:15:07.910ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
6015 test test::up_test::work_completed_write_acked_replay_write ... ok
60162023-09-22T23:15:07.910ZINFOcrucible: Crucible stats registered with UUID: 1f9600a7-06a9-4142-a7ac-e0b3a667f0ab
60172023-09-22T23:15:07.910ZINFOcrucible: Crucible 1f9600a7-06a9-4142-a7ac-e0b3a667f0ab has session id: 0c0ff068-7e7f-4af5-ba7e-4fbf8166332b
60182023-09-22T23:15:07.910ZINFOcrucible: 1f9600a7-06a9-4142-a7ac-e0b3a667f0ab is now active with session: 8050d4d8-814b-4881-840f-92bfd639e641
60192023-09-22T23:15:07.910ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
6020 test test::up_test::work_completed_write_acked_replay_write_unwritten ... ok
60212023-09-22T23:15:07.911ZINFOcrucible: Crucible stats registered with UUID: c9a76631-7a47-44a8-a263-c2d8ea009f8d
60222023-09-22T23:15:07.911ZINFOcrucible: Crucible c9a76631-7a47-44a8-a263-c2d8ea009f8d has session id: 46bfd0c8-979b-48bb-895c-418f502063af
60232023-09-22T23:15:07.911ZINFOcrucible: c9a76631-7a47-44a8-a263-c2d8ea009f8d is now active with session: 8213d1cb-6d75-4e92-ae36-f0d17a1aea26
6024 test test::up_test::work_completed_write_flush ... ok
60252023-09-22T23:15:07.912ZINFOcrucible: Crucible stats registered with UUID: 893e7565-f000-40e1-b286-2e46c8ccf3d7
60262023-09-22T23:15:07.912ZINFOcrucible: Crucible 893e7565-f000-40e1-b286-2e46c8ccf3d7 has session id: 715a840b-0653-48e9-be16-8bb9bacef61f
60272023-09-22T23:15:07.912ZINFOcrucible: 893e7565-f000-40e1-b286-2e46c8ccf3d7 is now active with session: 884e1daf-48b9-446f-8c24-d8353d4afaa1
6028 test test::up_test::work_completed_write_unwritten_flush ... ok
60292023-09-22T23:15:07.912ZINFOcrucible: Crucible stats registered with UUID: c2df6c83-25e8-41e9-9eab-950a0052cd6e
60302023-09-22T23:15:07.912ZINFOcrucible: Crucible c2df6c83-25e8-41e9-9eab-950a0052cd6e has session id: 8191958a-2051-4d5b-a2d2-5c150ee08809
60312023-09-22T23:15:07.912ZINFOcrucible: c2df6c83-25e8-41e9-9eab-950a0052cd6e is now active with session: e9062d92-108d-4c2c-871a-821a582a987e
6032 test test::up_test::work_delay_completion_flush_order_write ... ok
60332023-09-22T23:15:07.913ZINFOcrucible: Crucible stats registered with UUID: 3590da25-e844-4515-8bb6-c76ac9e53f5b
60342023-09-22T23:15:07.913ZINFOcrucible: Crucible 3590da25-e844-4515-8bb6-c76ac9e53f5b has session id: 6ae907de-bff5-4e26-a023-ff18fe667045
60352023-09-22T23:15:07.913ZINFOcrucible: 3590da25-e844-4515-8bb6-c76ac9e53f5b is now active with session: 79619812-299a-4bf8-bc5c-836b5fb53d1c
6036 test test::up_test::work_delay_completion_flush_order_write_unwritten ... ok
60372023-09-22T23:15:07.914ZINFOcrucible: Crucible stats registered with UUID: 5e51a52c-ceb1-4dab-9bdf-8141b54be0d0
60382023-09-22T23:15:07.914ZINFOcrucible: Crucible 5e51a52c-ceb1-4dab-9bdf-8141b54be0d0 has session id: 2467441e-a61b-4970-bf33-34448c849376
60392023-09-22T23:15:07.914ZINFOcrucible: 5e51a52c-ceb1-4dab-9bdf-8141b54be0d0 is now active with session: a254b3a1-de3b-4df8-9027-0ad142e8f115
6040 test test::up_test::work_delay_completion_flush_write ... ok
60412023-09-22T23:15:07.914ZINFOcrucible: Crucible stats registered with UUID: a5d20d1a-5608-4c5a-93e3-50ed6fd8be5e
60422023-09-22T23:15:07.914ZINFOcrucible: Crucible a5d20d1a-5608-4c5a-93e3-50ed6fd8be5e has session id: 11e4bc89-11bc-4ee7-9146-37f04496fc35
60432023-09-22T23:15:07.914ZINFOcrucible: a5d20d1a-5608-4c5a-93e3-50ed6fd8be5e is now active with session: 9113167a-34ee-4a4f-9aa4-8f505347ff71
6044 test test::up_test::work_delay_completion_flush_write_unwritten ... ok
60452023-09-22T23:15:07.915ZINFOcrucible: Crucible stats registered with UUID: a77ad568-3a2c-48e5-abd3-1375cab8eed8
60462023-09-22T23:15:07.915ZINFOcrucible: Crucible a77ad568-3a2c-48e5-abd3-1375cab8eed8 has session id: 43d1f437-1fe2-4422-aad2-a2fd23f4ba1e
60472023-09-22T23:15:07.915ZINFOcrucible: a77ad568-3a2c-48e5-abd3-1375cab8eed8 is now active with session: c8dc9abc-5bb7-4394-bca5-68f7bb96f516
60482023-09-22T23:15:07.915ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
60492023-09-22T23:15:07.915ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
6050 test test::up_test::work_flush_one_error_then_ok ... ok
60512023-09-22T23:15:07.916ZINFOcrucible: Crucible stats registered with UUID: e2902b2a-207c-4d49-9e41-2bd9a0901317
60522023-09-22T23:15:07.916ZINFOcrucible: Crucible e2902b2a-207c-4d49-9e41-2bd9a0901317 has session id: 46e0c9b1-8d0c-4caf-81f3-6694058e1861
60532023-09-22T23:15:07.916ZINFOcrucible: e2902b2a-207c-4d49-9e41-2bd9a0901317 is now active with session: bd66bb77-ae90-4d0e-87ad-090dc0dbd7a0
6054 test test::up_test::work_flush_snapshot_needs_three ... ok
60552023-09-22T23:15:07.916ZINFOcrucible: Crucible stats registered with UUID: 80ea100b-4ab8-4801-9a99-9fecbbe4ba26
60562023-09-22T23:15:07.916ZINFOcrucible: Crucible 80ea100b-4ab8-4801-9a99-9fecbbe4ba26 has session id: 86899362-5bfd-4fbf-880c-a85b2a38c25f
60572023-09-22T23:15:07.916ZINFOcrucible: 80ea100b-4ab8-4801-9a99-9fecbbe4ba26 is now active with session: b5a47d70-3e8f-4cc4-86fe-9f6fe621db8b
6058 test test::up_test::work_flush_three_ok ... ok
60592023-09-22T23:15:07.917ZINFOcrucible: Crucible stats registered with UUID: eabeb7c8-e2fb-4a2d-a1b8-cba2b9f6ee4d
60602023-09-22T23:15:07.917ZINFOcrucible: Crucible eabeb7c8-e2fb-4a2d-a1b8-cba2b9f6ee4d has session id: b229608e-75cc-4ce7-9190-dd63eef1a9dc
60612023-09-22T23:15:07.917ZINFOcrucible: eabeb7c8-e2fb-4a2d-a1b8-cba2b9f6ee4d is now active with session: 5caf3b84-6035-40cf-8de0-8c4715e3e015
60622023-09-22T23:15:07.917ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
60632023-09-22T23:15:07.917ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
60642023-09-22T23:15:07.917ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([Error(GenericError("bad")), Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
60652023-09-22T23:15:07.917ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([Error(GenericError("bad")), Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
6066 test test::up_test::work_flush_two_errors_equals_fail ... ok
60672023-09-22T23:15:07.918ZINFOcrucible: Crucible stats registered with UUID: 2311bf3f-63b8-458a-a536-2d3155cbfb3e
60682023-09-22T23:15:07.918ZINFOcrucible: Crucible 2311bf3f-63b8-458a-a536-2d3155cbfb3e has session id: c9d8b2e6-ee49-4ceb-ae50-2246eda892f1
60692023-09-22T23:15:07.918ZINFOcrucible: 2311bf3f-63b8-458a-a536-2d3155cbfb3e is now active with session: 41d12ee8-0343-4a8c-84dd-fc05be956162
6070 test test::up_test::work_read_hash_mismatch ... ok
60712023-09-22T23:15:07.919ZINFOcrucible: Crucible stats registered with UUID: a381e584-2bf0-43fe-b59b-137e66b44d89
60722023-09-22T23:15:07.919ZINFOcrucible: Crucible a381e584-2bf0-43fe-b59b-137e66b44d89 has session id: 8a6b945f-ea2f-45d5-bc32-535d0679b91d
60732023-09-22T23:15:07.919ZINFOcrucible: a381e584-2bf0-43fe-b59b-137e66b44d89 is now active with session: 07e37163-98b4-4267-8516-6d70b9fb4ddc
6074 test test::up_test::work_read_hash_mismatch_ack ... ok
60752023-09-22T23:15:07.919ZINFOcrucible: Crucible stats registered with UUID: 3b7495fa-ba5a-44ab-9960-29bb1bf952f6
60762023-09-22T23:15:07.919ZINFOcrucible: Crucible 3b7495fa-ba5a-44ab-9960-29bb1bf952f6 has session id: f91dcaea-bed8-46d0-93fe-e24cc82ee575
60772023-09-22T23:15:07.919ZINFOcrucible: 3b7495fa-ba5a-44ab-9960-29bb1bf952f6 is now active with session: ad1c7123-c39a-4d5f-b4c5-337132e76fe8
6078 test test::up_test::work_read_hash_mismatch_inside ... ok
60792023-09-22T23:15:07.920ZINFOcrucible: Crucible stats registered with UUID: d8da0c28-5748-403b-99bf-bdcabcaad028
60802023-09-22T23:15:07.920ZINFOcrucible: Crucible d8da0c28-5748-403b-99bf-bdcabcaad028 has session id: 8a478a7e-be7c-4cdb-8be7-5184fad33a94
60812023-09-22T23:15:07.920ZINFOcrucible: d8da0c28-5748-403b-99bf-bdcabcaad028 is now active with session: 35eefb78-e525-4f1e-b6da-f8a797751acc
6082 test test::up_test::work_read_hash_mismatch_no_data ... ok
60832023-09-22T23:15:07.921ZINFOcrucible: Crucible stats registered with UUID: ce168ce0-e4d8-4043-bc16-797a6bf2999a
60842023-09-22T23:15:07.921ZINFOcrucible: Crucible ce168ce0-e4d8-4043-bc16-797a6bf2999a has session id: 2fd637b5-734c-4281-8c40-e29ac96604cf
60852023-09-22T23:15:07.921ZINFOcrucible: ce168ce0-e4d8-4043-bc16-797a6bf2999a is now active with session: 8c7ca4bd-d489-4bce-8c2f-70c287d18c48
6086 test test::up_test::work_read_hash_mismatch_no_data_next ... ok
6087 test test::up_test::work_read_hash_mismatch_third ... ok
60882023-09-22T23:15:07.922ZINFOcrucible: Crucible stats registered with UUID: ecc5cb47-31fc-43c8-b931-d661a98739ec
60892023-09-22T23:15:07.922ZINFOcrucible: Crucible ecc5cb47-31fc-43c8-b931-d661a98739ec has session id: 16f6c4e0-2ced-4df2-88dc-12e8d0e389cb
60902023-09-22T23:15:07.922ZINFOcrucible: ecc5cb47-31fc-43c8-b931-d661a98739ec is now active with session: 3a5e7884-043a-418c-9d79-f6f1eea94226
6091 test test::up_test::work_read_hash_mismatch_third_ack ... ok
60922023-09-22T23:15:07.923ZINFOcrucible: Crucible stats registered with UUID: 6c66de83-c13e-4e77-88da-9fe3c346561e
60932023-09-22T23:15:07.923ZINFOcrucible: Crucible 6c66de83-c13e-4e77-88da-9fe3c346561e has session id: fd9e3094-cf7f-4855-9207-d41472dbda48
60942023-09-22T23:15:07.923ZINFOcrucible: 6c66de83-c13e-4e77-88da-9fe3c346561e is now active with session: 24c70913-23a1-48ee-bd4c-cd255657956f
60952023-09-22T23:15:07.923ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60962023-09-22T23:15:07.923ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60972023-09-22T23:15:07.923ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6098 test test::up_test::work_read_one_bad_two_ok ... ok
60992023-09-22T23:15:07.924ZINFOcrucible: Crucible stats registered with UUID: 07370796-1ca1-43e2-aab5-ac41484fa588
61002023-09-22T23:15:07.924ZINFOcrucible: Crucible 07370796-1ca1-43e2-aab5-ac41484fa588 has session id: feda1885-36c1-4548-b570-3f7fde73ca4c
61012023-09-22T23:15:07.924ZINFOcrucible: 07370796-1ca1-43e2-aab5-ac41484fa588 is now active with session: fb9d06e1-d83d-476d-b125-618e3c89a205
6102 test test::up_test::work_read_one_ok ... ok
61032023-09-22T23:15:07.924ZINFOcrucible: Crucible stats registered with UUID: f21a1586-42c4-4f05-942e-64105fc3f582
61042023-09-22T23:15:07.924ZINFOcrucible: Crucible f21a1586-42c4-4f05-942e-64105fc3f582 has session id: e4a9de7a-ea6b-4a84-8d74-0a753b5576fe
61052023-09-22T23:15:07.924ZINFOcrucible: f21a1586-42c4-4f05-942e-64105fc3f582 is now active with session: d12b487d-03ac-4a65-ae4b-6e903b589ed7
61062023-09-22T23:15:07.924ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61072023-09-22T23:15:07.924ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61082023-09-22T23:15:07.924ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61092023-09-22T23:15:07.924ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61102023-09-22T23:15:07.924ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61112023-09-22T23:15:07.924ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61122023-09-22T23:15:07.924ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61132023-09-22T23:15:07.924ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61142023-09-22T23:15:07.925ZERROcrucible: [2] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), Error(GenericError("bad"))]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6115 test test::up_test::work_read_three_bad ... ok
61162023-09-22T23:15:07.925ZINFOcrucible: Crucible stats registered with UUID: 1769b1b4-f8f6-487e-89a7-f57129f665cc
61172023-09-22T23:15:07.925ZINFOcrucible: Crucible 1769b1b4-f8f6-487e-89a7-f57129f665cc has session id: 42939798-7944-4117-9827-3c8906b7e5b0
61182023-09-22T23:15:07.925ZINFOcrucible: 1769b1b4-f8f6-487e-89a7-f57129f665cc is now active with session: 7fb7f7c5-1d41-4085-b15a-723483216d4a
61192023-09-22T23:15:07.925ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61202023-09-22T23:15:07.925ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61212023-09-22T23:15:07.925ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61222023-09-22T23:15:07.925ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61232023-09-22T23:15:07.925ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61242023-09-22T23:15:07.925ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6125 test test::up_test::work_read_two_bad_one_ok ... ok
61262023-09-22T23:15:07.926ZINFOcrucible: Crucible stats registered with UUID: 495d60dc-e511-4c6f-9bc0-cfc5be4b3b29
61272023-09-22T23:15:07.926ZINFOcrucible: Crucible 495d60dc-e511-4c6f-9bc0-cfc5be4b3b29 has session id: 3981a62c-0154-42af-bf0f-6f429d89ef1d
61282023-09-22T23:15:07.926ZINFOcrucible: 495d60dc-e511-4c6f-9bc0-cfc5be4b3b29 is now active with session: 12ba6721-39bc-49e6-a4bb-054cedcb17cf
61292023-09-22T23:15:07.926ZWARNcrucible: [2] 495d60dc-e511-4c6f-9bc0-cfc5be4b3b29 WARNING finish job 1000 when downstairs state:New
61302023-09-22T23:15:07.926ZWARNcrucible: [0] 495d60dc-e511-4c6f-9bc0-cfc5be4b3b29 WARNING finish job 1000 when downstairs state:New
61312023-09-22T23:15:07.926ZWARNcrucible: [1] 495d60dc-e511-4c6f-9bc0-cfc5be4b3b29 WARNING finish job 1000 when downstairs state:New
61322023-09-22T23:15:07.926ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Done, InProgress, Done]), ack_status: Acked, replay: false, data: Some([ReadResponse { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"", block_contexts: [BlockContext { hash: 17241709254077376921, encryption_context: None }] }]), read_response_hashes: [Some(17241709254077376921)], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61332023-09-22T23:15:07.926ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Done, InProgress, Done]), ack_status: Acked, replay: false, data: Some([ReadResponse { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"", block_contexts: [BlockContext { hash: 17241709254077376921, encryption_context: None }] }]), read_response_hashes: [Some(17241709254077376921)], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61342023-09-22T23:15:07.926ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Done, Error(GenericError("bad")), Done]), ack_status: Acked, replay: false, data: Some([ReadResponse { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"", block_contexts: [BlockContext { hash: 17241709254077376921, encryption_context: None }] }]), read_response_hashes: [Some(17241709254077376921)], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6135 test test::up_test::work_read_two_ok_one_bad ... ok
61362023-09-22T23:15:07.927ZINFOcrucible: Crucible stats registered with UUID: eb3ae96f-ced1-4e88-9e6c-50325f905b2f
61372023-09-22T23:15:07.927ZINFOcrucible: Crucible eb3ae96f-ced1-4e88-9e6c-50325f905b2f has session id: 453a16d0-30f6-448c-b7b3-061649d3a7dd
61382023-09-22T23:15:07.927ZINFOcrucible: eb3ae96f-ced1-4e88-9e6c-50325f905b2f is now active with session: 5cf8d0a5-6ffb-4282-be61-14ff9bfc0be7
61392023-09-22T23:15:07.927ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61402023-09-22T23:15:07.927ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61412023-09-22T23:15:07.927ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61422023-09-22T23:15:07.927ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6143 test test::up_test::work_write_errors_are_counted ... ok
61442023-09-22T23:15:07.927ZINFOcrucible: Crucible stats registered with UUID: b024fa9a-4d3d-406e-b7f0-fd018dbe4d87
61452023-09-22T23:15:07.927ZINFOcrucible: Crucible b024fa9a-4d3d-406e-b7f0-fd018dbe4d87 has session id: a1e44531-21d2-4472-9ebe-e22fe518c57f
61462023-09-22T23:15:07.928ZINFOcrucible: b024fa9a-4d3d-406e-b7f0-fd018dbe4d87 is now active with session: 866103d5-f460-44e5-8451-d449cad06146
61472023-09-22T23:15:07.928ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61482023-09-22T23:15:07.928ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61492023-09-22T23:15:07.928ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61502023-09-22T23:15:07.928ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6151 test test::up_test::work_write_unwritten_errors_are_counted ... ok
61522023-09-22T23:15:07.928ZINFOcrucible: Crucible stats registered with UUID: 00ccdbf3-b043-4bd8-93b0-b53aed6f525c
61532023-09-22T23:15:07.928ZINFOcrucible: Crucible 00ccdbf3-b043-4bd8-93b0-b53aed6f525c has session id: 2a05c5e3-5f22-48ce-abba-7e1d0a0e7f02
61542023-09-22T23:15:07.928ZINFOcrucible: [0] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) New New New ds_transition to WaitActive
61552023-09-22T23:15:07.928ZINFOcrucible: [0] Transition from New to WaitActive
61562023-09-22T23:15:07.928ZINFOcrucible: [0] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) WaitActive New New ds_transition to WaitQuorum
61572023-09-22T23:15:07.928ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
61582023-09-22T23:15:07.928ZINFOcrucible: [0] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) WaitQuorum New New ds_transition to Active
61592023-09-22T23:15:07.928ZINFOcrucible: [0] Transition from WaitQuorum to Active
61602023-09-22T23:15:07.928ZINFOcrucible: [1] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active New New ds_transition to WaitActive
61612023-09-22T23:15:07.928ZINFOcrucible: [1] Transition from New to WaitActive
61622023-09-22T23:15:07.928ZINFOcrucible: [1] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active WaitActive New ds_transition to WaitQuorum
61632023-09-22T23:15:07.928ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
61642023-09-22T23:15:07.928ZINFOcrucible: [1] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active WaitQuorum New ds_transition to Active
61652023-09-22T23:15:07.928ZINFOcrucible: [1] Transition from WaitQuorum to Active
61662023-09-22T23:15:07.929ZINFOcrucible: [2] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active Active New ds_transition to WaitActive
61672023-09-22T23:15:07.929ZINFOcrucible: [2] Transition from New to WaitActive
61682023-09-22T23:15:07.929ZINFOcrucible: [2] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active Active WaitActive ds_transition to WaitQuorum
61692023-09-22T23:15:07.929ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
61702023-09-22T23:15:07.929ZINFOcrucible: [2] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active Active WaitQuorum ds_transition to Active
61712023-09-22T23:15:07.929ZINFOcrucible: [2] Transition from WaitQuorum to Active
61722023-09-22T23:15:07.929ZINFOcrucible: 00ccdbf3-b043-4bd8-93b0-b53aed6f525c is now active with session: de9541f2-f260-432a-ba78-50547da1de7f
61732023-09-22T23:15:07.929ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61742023-09-22T23:15:07.929ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61752023-09-22T23:15:07.929ZINFOcrucible: [0] client skip 1 in process jobs because fault = downstairs
61762023-09-22T23:15:07.929ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
61772023-09-22T23:15:07.929ZINFOcrucible: [0] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Active Active Active ds_transition to Faulted
61782023-09-22T23:15:07.929ZINFOcrucible: [0] Transition from Active to Faulted
61792023-09-22T23:15:07.929ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61802023-09-22T23:15:07.929ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61812023-09-22T23:15:07.929ZINFOcrucible: [1] client skip 1 in process jobs because fault = downstairs
61822023-09-22T23:15:07.929ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
61832023-09-22T23:15:07.929ZINFOcrucible: [1] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Faulted Active Active ds_transition to Faulted
61842023-09-22T23:15:07.929ZINFOcrucible: [1] Transition from Active to Faulted
61852023-09-22T23:15:07.929ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61862023-09-22T23:15:07.929ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61872023-09-22T23:15:07.929ZINFOcrucible: [2] client skip 1 in process jobs because fault = downstairs
61882023-09-22T23:15:07.929ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
61892023-09-22T23:15:07.929ZINFOcrucible: [2] 00ccdbf3-b043-4bd8-93b0-b53aed6f525c (de9541f2-f260-432a-ba78-50547da1de7f) Faulted Faulted Active ds_transition to Faulted
61902023-09-22T23:15:07.929ZINFOcrucible: [2] Transition from Active to Faulted
6191 test test::up_test::work_writes_bad ... ok
61922023-09-22T23:15:07.930ZINFOcrucible: Crucible stats registered with UUID: 33421f87-1ccb-4cfb-81de-fcfc054a60c1
61932023-09-22T23:15:07.930ZINFOcrucible: Crucible 33421f87-1ccb-4cfb-81de-fcfc054a60c1 has session id: 32fec4dc-94d8-44a6-a66f-bf241edf53a3
61942023-09-22T23:15:07.930ZINFOcrucible: [0] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) New New New ds_transition to WaitActive
61952023-09-22T23:15:07.930ZINFOcrucible: [0] Transition from New to WaitActive
61962023-09-22T23:15:07.930ZINFOcrucible: [0] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) WaitActive New New ds_transition to WaitQuorum
61972023-09-22T23:15:07.930ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
61982023-09-22T23:15:07.930ZINFOcrucible: [0] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) WaitQuorum New New ds_transition to Active
61992023-09-22T23:15:07.930ZINFOcrucible: [0] Transition from WaitQuorum to Active
62002023-09-22T23:15:07.930ZINFOcrucible: [1] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active New New ds_transition to WaitActive
62012023-09-22T23:15:07.930ZINFOcrucible: [1] Transition from New to WaitActive
62022023-09-22T23:15:07.930ZINFOcrucible: [1] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active WaitActive New ds_transition to WaitQuorum
62032023-09-22T23:15:07.930ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62042023-09-22T23:15:07.930ZINFOcrucible: [1] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active WaitQuorum New ds_transition to Active
62052023-09-22T23:15:07.930ZINFOcrucible: [1] Transition from WaitQuorum to Active
62062023-09-22T23:15:07.930ZINFOcrucible: [2] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active Active New ds_transition to WaitActive
62072023-09-22T23:15:07.930ZINFOcrucible: [2] Transition from New to WaitActive
62082023-09-22T23:15:07.930ZINFOcrucible: [2] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active Active WaitActive ds_transition to WaitQuorum
62092023-09-22T23:15:07.930ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62102023-09-22T23:15:07.930ZINFOcrucible: [2] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active Active WaitQuorum ds_transition to Active
62112023-09-22T23:15:07.930ZINFOcrucible: [2] Transition from WaitQuorum to Active
62122023-09-22T23:15:07.930ZINFOcrucible: 33421f87-1ccb-4cfb-81de-fcfc054a60c1 is now active with session: 3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3
62132023-09-22T23:15:07.930ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62142023-09-22T23:15:07.930ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62152023-09-22T23:15:07.930ZINFOcrucible: [1] client skip 1 in process jobs because fault = downstairs
62162023-09-22T23:15:07.930ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
62172023-09-22T23:15:07.930ZINFOcrucible: [1] 33421f87-1ccb-4cfb-81de-fcfc054a60c1 (3b5c7131-ce19-4d7f-8c58-f07ac11ebaa3) Active Active Active ds_transition to Faulted
62182023-09-22T23:15:07.930ZINFOcrucible: [1] Transition from Active to Faulted
6219 test test::up_test::write_after_write_fail_is_alright ... ok
62202023-09-22T23:15:07.931ZINFOcrucible: Crucible stats registered with UUID: 270553f6-3717-4381-b86f-5053f8a21eae
62212023-09-22T23:15:07.931ZINFOcrucible: Crucible 270553f6-3717-4381-b86f-5053f8a21eae has session id: c952298b-2110-439b-bbad-439723c8b004
62222023-09-22T23:15:07.931ZINFOcrucible: 270553f6-3717-4381-b86f-5053f8a21eae is now active with session: 4624fd44-b981-4ccb-a010-c3704c3ca4b9
62232023-09-22T23:15:07.931ZINFOcrucible: [0] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) New New New ds_transition to WaitActive
62242023-09-22T23:15:07.931ZINFOcrucible: [0] Transition from New to WaitActive
62252023-09-22T23:15:07.931ZINFOcrucible: [0] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) WaitActive New New ds_transition to WaitQuorum
62262023-09-22T23:15:07.931ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62272023-09-22T23:15:07.931ZINFOcrucible: [0] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) WaitQuorum New New ds_transition to Active
62282023-09-22T23:15:07.931ZINFOcrucible: [0] Transition from WaitQuorum to Active
62292023-09-22T23:15:07.931ZINFOcrucible: [1] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active New New ds_transition to WaitActive
62302023-09-22T23:15:07.931ZINFOcrucible: [1] Transition from New to WaitActive
62312023-09-22T23:15:07.931ZINFOcrucible: [1] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active WaitActive New ds_transition to WaitQuorum
62322023-09-22T23:15:07.931ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62332023-09-22T23:15:07.931ZINFOcrucible: [1] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active WaitQuorum New ds_transition to Active
62342023-09-22T23:15:07.931ZINFOcrucible: [1] Transition from WaitQuorum to Active
62352023-09-22T23:15:07.931ZINFOcrucible: [2] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active Active New ds_transition to WaitActive
62362023-09-22T23:15:07.931ZINFOcrucible: [2] Transition from New to WaitActive
62372023-09-22T23:15:07.931ZINFOcrucible: [2] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active Active WaitActive ds_transition to WaitQuorum
62382023-09-22T23:15:07.931ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62392023-09-22T23:15:07.931ZINFOcrucible: [2] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active Active WaitQuorum ds_transition to Active
62402023-09-22T23:15:07.931ZINFOcrucible: [2] Transition from WaitQuorum to Active
62412023-09-22T23:15:07.931ZINFOcrucible: [1] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active Active Active ds_transition to Faulted
62422023-09-22T23:15:07.931ZINFOcrucible: [1] Transition from Active to Faulted
62432023-09-22T23:15:07.931ZINFOcrucible: [2] 270553f6-3717-4381-b86f-5053f8a21eae (4624fd44-b981-4ccb-a010-c3704c3ca4b9) Active Faulted Active ds_transition to Faulted
62442023-09-22T23:15:07.931ZINFOcrucible: [2] Transition from Active to Faulted
6245 test test::up_test::write_double_skip ... ok
62462023-09-22T23:15:07.932ZINFOcrucible: Crucible stats registered with UUID: 90797728-3dac-480d-97b8-e1acae1f83f6
62472023-09-22T23:15:07.932ZINFOcrucible: Crucible 90797728-3dac-480d-97b8-e1acae1f83f6 has session id: 7aec65a8-f9d8-4f31-bd48-00356705b043
62482023-09-22T23:15:07.932ZINFOcrucible: 90797728-3dac-480d-97b8-e1acae1f83f6 is now active with session: fdf44758-7d74-4ede-a5e5-f6f9ba3175ad
62492023-09-22T23:15:07.932ZINFOcrucible: [0] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) New New New ds_transition to WaitActive
62502023-09-22T23:15:07.932ZINFOcrucible: [0] Transition from New to WaitActive
62512023-09-22T23:15:07.932ZINFOcrucible: [0] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) WaitActive New New ds_transition to WaitQuorum
62522023-09-22T23:15:07.932ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62532023-09-22T23:15:07.932ZINFOcrucible: [0] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) WaitQuorum New New ds_transition to Active
62542023-09-22T23:15:07.932ZINFOcrucible: [0] Transition from WaitQuorum to Active
62552023-09-22T23:15:07.932ZINFOcrucible: [1] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active New New ds_transition to WaitActive
62562023-09-22T23:15:07.932ZINFOcrucible: [1] Transition from New to WaitActive
62572023-09-22T23:15:07.932ZINFOcrucible: [1] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active WaitActive New ds_transition to WaitQuorum
62582023-09-22T23:15:07.932ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62592023-09-22T23:15:07.932ZINFOcrucible: [1] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active WaitQuorum New ds_transition to Active
62602023-09-22T23:15:07.932ZINFOcrucible: [1] Transition from WaitQuorum to Active
62612023-09-22T23:15:07.932ZINFOcrucible: [2] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active Active New ds_transition to WaitActive
62622023-09-22T23:15:07.932ZINFOcrucible: [2] Transition from New to WaitActive
62632023-09-22T23:15:07.932ZINFOcrucible: [2] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active Active WaitActive ds_transition to WaitQuorum
62642023-09-22T23:15:07.932ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62652023-09-22T23:15:07.932ZINFOcrucible: [2] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active Active WaitQuorum ds_transition to Active
62662023-09-22T23:15:07.932ZINFOcrucible: [2] Transition from WaitQuorum to Active
62672023-09-22T23:15:07.932ZINFOcrucible: [2] 90797728-3dac-480d-97b8-e1acae1f83f6 (fdf44758-7d74-4ede-a5e5-f6f9ba3175ad) Active Active Active ds_transition to Faulted
62682023-09-22T23:15:07.932ZINFOcrucible: [2] Transition from Active to Faulted
62692023-09-22T23:15:07.932ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62702023-09-22T23:15:07.932ZERROcrucible: [1] Reports error GenericError("bad") on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62712023-09-22T23:15:07.933ZINFOcrucible: All done
6272 test test::up_test::write_fail_and_skip ... ok
62732023-09-22T23:15:07.933ZINFOcrucible: Crucible stats registered with UUID: 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc
62742023-09-22T23:15:07.933ZINFOcrucible: Crucible 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc has session id: 795dc1f2-2855-49b2-92c1-cefa30100659
62752023-09-22T23:15:07.933ZINFOcrucible: [0] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) New New New ds_transition to WaitActive
62762023-09-22T23:15:07.933ZINFOcrucible: [0] Transition from New to WaitActive
62772023-09-22T23:15:07.933ZINFOcrucible: [0] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) WaitActive New New ds_transition to WaitQuorum
62782023-09-22T23:15:07.933ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62792023-09-22T23:15:07.933ZINFOcrucible: [0] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) WaitQuorum New New ds_transition to Active
62802023-09-22T23:15:07.933ZINFOcrucible: [0] Transition from WaitQuorum to Active
62812023-09-22T23:15:07.933ZINFOcrucible: [1] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active New New ds_transition to WaitActive
62822023-09-22T23:15:07.933ZINFOcrucible: [1] Transition from New to WaitActive
62832023-09-22T23:15:07.933ZINFOcrucible: [1] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active WaitActive New ds_transition to WaitQuorum
62842023-09-22T23:15:07.933ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62852023-09-22T23:15:07.933ZINFOcrucible: [1] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active WaitQuorum New ds_transition to Active
62862023-09-22T23:15:07.933ZINFOcrucible: [1] Transition from WaitQuorum to Active
62872023-09-22T23:15:07.933ZINFOcrucible: [2] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active Active New ds_transition to WaitActive
62882023-09-22T23:15:07.933ZINFOcrucible: [2] Transition from New to WaitActive
62892023-09-22T23:15:07.933ZINFOcrucible: [2] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active Active WaitActive ds_transition to WaitQuorum
62902023-09-22T23:15:07.933ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62912023-09-22T23:15:07.933ZINFOcrucible: [2] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active Active WaitQuorum ds_transition to Active
62922023-09-22T23:15:07.934ZINFOcrucible: [2] Transition from WaitQuorum to Active
62932023-09-22T23:15:07.934ZINFOcrucible: 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc is now active with session: fae73c4b-29fa-4f0a-b721-b0d0fad067e5
62942023-09-22T23:15:07.934ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62952023-09-22T23:15:07.934ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62962023-09-22T23:15:07.934ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
62972023-09-22T23:15:07.934ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
62982023-09-22T23:15:07.934ZINFOcrucible: [2] 2d3b1a7f-5bd8-4cf2-ba71-8cf44b7668bc (fae73c4b-29fa-4f0a-b721-b0d0fad067e5) Active Active Active ds_transition to Faulted
62992023-09-22T23:15:07.934ZINFOcrucible: [2] Transition from Active to Faulted
6300 test test::up_test::write_fail_past_present_future ... ok
63012023-09-22T23:15:07.934ZINFOcrucible: Crucible stats registered with UUID: 4f4545ae-5a18-4558-9d4d-b6b789aed250
63022023-09-22T23:15:07.934ZINFOcrucible: Crucible 4f4545ae-5a18-4558-9d4d-b6b789aed250 has session id: 4800db57-e1a6-4df9-8626-09fadf459163
63032023-09-22T23:15:07.934ZINFOcrucible: [0] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) New New New ds_transition to WaitActive
63042023-09-22T23:15:07.934ZINFOcrucible: [0] Transition from New to WaitActive
63052023-09-22T23:15:07.934ZINFOcrucible: [0] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) WaitActive New New ds_transition to WaitQuorum
63062023-09-22T23:15:07.934ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
63072023-09-22T23:15:07.934ZINFOcrucible: [0] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) WaitQuorum New New ds_transition to Active
63082023-09-22T23:15:07.934ZINFOcrucible: [0] Transition from WaitQuorum to Active
63092023-09-22T23:15:07.934ZINFOcrucible: [1] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active New New ds_transition to WaitActive
63102023-09-22T23:15:07.934ZINFOcrucible: [1] Transition from New to WaitActive
63112023-09-22T23:15:07.934ZINFOcrucible: [1] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active WaitActive New ds_transition to WaitQuorum
63122023-09-22T23:15:07.934ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
63132023-09-22T23:15:07.935ZINFOcrucible: [1] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active WaitQuorum New ds_transition to Active
63142023-09-22T23:15:07.935ZINFOcrucible: [1] Transition from WaitQuorum to Active
63152023-09-22T23:15:07.935ZINFOcrucible: [2] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active Active New ds_transition to WaitActive
63162023-09-22T23:15:07.935ZINFOcrucible: [2] Transition from New to WaitActive
63172023-09-22T23:15:07.935ZINFOcrucible: [2] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active Active WaitActive ds_transition to WaitQuorum
63182023-09-22T23:15:07.935ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
63192023-09-22T23:15:07.935ZINFOcrucible: [2] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active Active WaitQuorum ds_transition to Active
63202023-09-22T23:15:07.935ZINFOcrucible: [2] Transition from WaitQuorum to Active
63212023-09-22T23:15:07.935ZINFOcrucible: 4f4545ae-5a18-4558-9d4d-b6b789aed250 is now active with session: 609a3918-75a6-4a97-9171-83db5dc7e1a3
63222023-09-22T23:15:07.935ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63232023-09-22T23:15:07.935ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63242023-09-22T23:15:07.935ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
63252023-09-22T23:15:07.935ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
63262023-09-22T23:15:07.935ZINFOcrucible: [1] 4f4545ae-5a18-4558-9d4d-b6b789aed250 (609a3918-75a6-4a97-9171-83db5dc7e1a3) Active Active Active ds_transition to Faulted
63272023-09-22T23:15:07.935ZINFOcrucible: [1] Transition from Active to Faulted
6328 test test::up_test::write_fail_skips_inprogress_jobs ... ok
63292023-09-22T23:15:07.935ZINFOcrucible: Crucible stats registered with UUID: f0fb8083-4305-44d5-b813-348c2229d463
63302023-09-22T23:15:07.935ZINFOcrucible: Crucible f0fb8083-4305-44d5-b813-348c2229d463 has session id: 930ea42e-4697-4f96-ba50-ad58e1278fe3
63312023-09-22T23:15:07.935ZINFOcrucible: [0] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) New New New ds_transition to WaitActive
63322023-09-22T23:15:07.935ZINFOcrucible: [0] Transition from New to WaitActive
63332023-09-22T23:15:07.935ZINFOcrucible: [0] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) WaitActive New New ds_transition to WaitQuorum
63342023-09-22T23:15:07.936ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
63352023-09-22T23:15:07.936ZINFOcrucible: [0] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) WaitQuorum New New ds_transition to Active
63362023-09-22T23:15:07.936ZINFOcrucible: [0] Transition from WaitQuorum to Active
63372023-09-22T23:15:07.936ZINFOcrucible: [1] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active New New ds_transition to WaitActive
63382023-09-22T23:15:07.936ZINFOcrucible: [1] Transition from New to WaitActive
63392023-09-22T23:15:07.936ZINFOcrucible: [1] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active WaitActive New ds_transition to WaitQuorum
63402023-09-22T23:15:07.936ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
63412023-09-22T23:15:07.936ZINFOcrucible: [1] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active WaitQuorum New ds_transition to Active
63422023-09-22T23:15:07.936ZINFOcrucible: [1] Transition from WaitQuorum to Active
63432023-09-22T23:15:07.936ZINFOcrucible: [2] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active Active New ds_transition to WaitActive
63442023-09-22T23:15:07.936ZINFOcrucible: [2] Transition from New to WaitActive
63452023-09-22T23:15:07.936ZINFOcrucible: [2] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active Active WaitActive ds_transition to WaitQuorum
63462023-09-22T23:15:07.936ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
63472023-09-22T23:15:07.936ZINFOcrucible: [2] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active Active WaitQuorum ds_transition to Active
63482023-09-22T23:15:07.936ZINFOcrucible: [2] Transition from WaitQuorum to Active
63492023-09-22T23:15:07.936ZINFOcrucible: f0fb8083-4305-44d5-b813-348c2229d463 is now active with session: 0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d
63502023-09-22T23:15:07.936ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63512023-09-22T23:15:07.936ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63522023-09-22T23:15:07.936ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
63532023-09-22T23:15:07.936ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
63542023-09-22T23:15:07.936ZINFOcrucible: [2] f0fb8083-4305-44d5-b813-348c2229d463 (0dbd1576-efcc-4b21-ba6d-cae1dcf23f6d) Active Active Active ds_transition to Faulted
63552023-09-22T23:15:07.936ZINFOcrucible: [2] Transition from Active to Faulted
6356 test test::up_test::write_fail_skips_many_jobs ... ok
63572023-09-22T23:15:07.937ZINFOcrucible: Crucible stats registered with UUID: a1a73118-3bfb-476f-959e-036eda3741ab
63582023-09-22T23:15:07.937ZINFOcrucible: Crucible a1a73118-3bfb-476f-959e-036eda3741ab has session id: 34ff5d46-1992-4faa-88b4-15e60ffb743f
63592023-09-22T23:15:07.937ZINFOcrucible: [0] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) New New New ds_transition to WaitActive
63602023-09-22T23:15:07.937ZINFOcrucible: [0] Transition from New to WaitActive
63612023-09-22T23:15:07.937ZINFOcrucible: [0] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) WaitActive New New ds_transition to WaitQuorum
63622023-09-22T23:15:07.937ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
63632023-09-22T23:15:07.937ZINFOcrucible: [0] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) WaitQuorum New New ds_transition to Active
63642023-09-22T23:15:07.937ZINFOcrucible: [0] Transition from WaitQuorum to Active
63652023-09-22T23:15:07.937ZINFOcrucible: [1] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active New New ds_transition to WaitActive
63662023-09-22T23:15:07.937ZINFOcrucible: [1] Transition from New to WaitActive
63672023-09-22T23:15:07.937ZINFOcrucible: [1] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active WaitActive New ds_transition to WaitQuorum
63682023-09-22T23:15:07.937ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
63692023-09-22T23:15:07.937ZINFOcrucible: [1] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active WaitQuorum New ds_transition to Active
63702023-09-22T23:15:07.937ZINFOcrucible: [1] Transition from WaitQuorum to Active
63712023-09-22T23:15:07.937ZINFOcrucible: [2] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active Active New ds_transition to WaitActive
63722023-09-22T23:15:07.937ZINFOcrucible: [2] Transition from New to WaitActive
63732023-09-22T23:15:07.937ZINFOcrucible: [2] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active Active WaitActive ds_transition to WaitQuorum
63742023-09-22T23:15:07.937ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
63752023-09-22T23:15:07.937ZINFOcrucible: [2] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active Active WaitQuorum ds_transition to Active
63762023-09-22T23:15:07.937ZINFOcrucible: [2] Transition from WaitQuorum to Active
63772023-09-22T23:15:07.937ZINFOcrucible: a1a73118-3bfb-476f-959e-036eda3741ab is now active with session: 81b560d5-2e28-48a8-87e9-1e1f45fdd5d8
63782023-09-22T23:15:07.937ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63792023-09-22T23:15:07.937ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63802023-09-22T23:15:07.937ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
63812023-09-22T23:15:07.937ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
63822023-09-22T23:15:07.937ZINFOcrucible: [1] a1a73118-3bfb-476f-959e-036eda3741ab (81b560d5-2e28-48a8-87e9-1e1f45fdd5d8) Active Active Active ds_transition to Faulted
63832023-09-22T23:15:07.937ZINFOcrucible: [1] Transition from Active to Faulted
6384 test test::up_test::write_fail_skips_new_jobs ... ok
63852023-09-22T23:15:07.938ZINFOcrucible: Crucible stats registered with UUID: 5fea0624-430f-4e90-9e64-ea7c9e24dbc8
63862023-09-22T23:15:07.938ZINFOcrucible: Crucible 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 has session id: ad865632-2d7e-4a30-9729-48987a0e96ac
63872023-09-22T23:15:07.938ZINFOcrucible: 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 is now active with session: 41e514fe-864a-46fe-9eac-6b73d710d856
63882023-09-22T23:15:07.938ZINFOcrucible: [0] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) New New New ds_transition to WaitActive
63892023-09-22T23:15:07.938ZINFOcrucible: [0] Transition from New to WaitActive
63902023-09-22T23:15:07.938ZINFOcrucible: [0] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) WaitActive New New ds_transition to WaitQuorum
63912023-09-22T23:15:07.938ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
63922023-09-22T23:15:07.938ZINFOcrucible: [0] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) WaitQuorum New New ds_transition to Active
63932023-09-22T23:15:07.938ZINFOcrucible: [0] Transition from WaitQuorum to Active
63942023-09-22T23:15:07.938ZINFOcrucible: [1] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active New New ds_transition to WaitActive
63952023-09-22T23:15:07.938ZINFOcrucible: [1] Transition from New to WaitActive
63962023-09-22T23:15:07.938ZINFOcrucible: [1] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active WaitActive New ds_transition to WaitQuorum
63972023-09-22T23:15:07.938ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
63982023-09-22T23:15:07.938ZINFOcrucible: [1] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active WaitQuorum New ds_transition to Active
63992023-09-22T23:15:07.938ZINFOcrucible: [1] Transition from WaitQuorum to Active
64002023-09-22T23:15:07.938ZINFOcrucible: [2] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active Active New ds_transition to WaitActive
64012023-09-22T23:15:07.938ZINFOcrucible: [2] Transition from New to WaitActive
64022023-09-22T23:15:07.938ZINFOcrucible: [2] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active Active WaitActive ds_transition to WaitQuorum
64032023-09-22T23:15:07.938ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
64042023-09-22T23:15:07.938ZINFOcrucible: [2] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active Active WaitQuorum ds_transition to Active
64052023-09-22T23:15:07.938ZINFOcrucible: [2] Transition from WaitQuorum to Active
64062023-09-22T23:15:07.938ZINFOcrucible: [1] 5fea0624-430f-4e90-9e64-ea7c9e24dbc8 (41e514fe-864a-46fe-9eac-6b73d710d856) Active Active Active ds_transition to Faulted
64072023-09-22T23:15:07.938ZINFOcrucible: [1] Transition from Active to Faulted
6408 test test::up_test::write_single_skip ... ok
64092023-09-22T23:15:07.939ZINFOcrucible: Crucible stats registered with UUID: 830be375-01ed-4185-b9f4-a587fecd7f4c
64102023-09-22T23:15:07.939ZINFOcrucible: Crucible 830be375-01ed-4185-b9f4-a587fecd7f4c has session id: a1d16d88-a9d9-41b3-aebb-43ec2fab8857
64112023-09-22T23:15:07.939ZINFOcrucible: 830be375-01ed-4185-b9f4-a587fecd7f4c is now active with session: 9761517b-640f-4e3d-a10b-ffee13c2a335
64122023-09-22T23:15:07.939ZINFOcrucible: [0] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) New New New ds_transition to WaitActive
64132023-09-22T23:15:07.939ZINFOcrucible: [0] Transition from New to WaitActive
64142023-09-22T23:15:07.939ZINFOcrucible: [0] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) WaitActive New New ds_transition to WaitQuorum
64152023-09-22T23:15:07.939ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
64162023-09-22T23:15:07.939ZINFOcrucible: [0] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) WaitQuorum New New ds_transition to Active
64172023-09-22T23:15:07.939ZINFOcrucible: [0] Transition from WaitQuorum to Active
64182023-09-22T23:15:07.939ZINFOcrucible: [1] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active New New ds_transition to WaitActive
64192023-09-22T23:15:07.939ZINFOcrucible: [1] Transition from New to WaitActive
64202023-09-22T23:15:07.939ZINFOcrucible: [1] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active WaitActive New ds_transition to WaitQuorum
64212023-09-22T23:15:07.939ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
64222023-09-22T23:15:07.939ZINFOcrucible: [1] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active WaitQuorum New ds_transition to Active
64232023-09-22T23:15:07.939ZINFOcrucible: [1] Transition from WaitQuorum to Active
64242023-09-22T23:15:07.939ZINFOcrucible: [2] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active Active New ds_transition to WaitActive
64252023-09-22T23:15:07.939ZINFOcrucible: [2] Transition from New to WaitActive
64262023-09-22T23:15:07.939ZINFOcrucible: [2] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active Active WaitActive ds_transition to WaitQuorum
64272023-09-22T23:15:07.939ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
64282023-09-22T23:15:07.939ZINFOcrucible: [2] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active Active WaitQuorum ds_transition to Active
64292023-09-22T23:15:07.939ZINFOcrucible: [2] Transition from WaitQuorum to Active
64302023-09-22T23:15:07.939ZINFOcrucible: [1] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active Active Active ds_transition to Faulted
64312023-09-22T23:15:07.939ZINFOcrucible: [1] Transition from Active to Faulted
64322023-09-22T23:15:07.939ZINFOcrucible: [2] 830be375-01ed-4185-b9f4-a587fecd7f4c (9761517b-640f-4e3d-a10b-ffee13c2a335) Active Faulted Active ds_transition to Faulted
64332023-09-22T23:15:07.939ZINFOcrucible: [2] Transition from Active to Faulted
6434 test test::up_test::write_unwritten_double_skip ... ok
64352023-09-22T23:15:07.940ZINFOcrucible: Crucible stats registered with UUID: 9c388cd6-fae1-4b08-8053-810eeb837889
64362023-09-22T23:15:07.940ZINFOcrucible: Crucible 9c388cd6-fae1-4b08-8053-810eeb837889 has session id: dfbcf242-6116-407f-b4f0-6ad620d22767
64372023-09-22T23:15:07.940ZINFOcrucible: 9c388cd6-fae1-4b08-8053-810eeb837889 is now active with session: b665010c-5239-4d80-9110-d7705153361f
64382023-09-22T23:15:07.940ZINFOcrucible: [0] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) New New New ds_transition to WaitActive
64392023-09-22T23:15:07.940ZINFOcrucible: [0] Transition from New to WaitActive
64402023-09-22T23:15:07.940ZINFOcrucible: [0] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) WaitActive New New ds_transition to WaitQuorum
64412023-09-22T23:15:07.940ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
64422023-09-22T23:15:07.940ZINFOcrucible: [0] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) WaitQuorum New New ds_transition to Active
64432023-09-22T23:15:07.940ZINFOcrucible: [0] Transition from WaitQuorum to Active
64442023-09-22T23:15:07.940ZINFOcrucible: [1] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active New New ds_transition to WaitActive
64452023-09-22T23:15:07.940ZINFOcrucible: [1] Transition from New to WaitActive
64462023-09-22T23:15:07.940ZINFOcrucible: [1] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active WaitActive New ds_transition to WaitQuorum
64472023-09-22T23:15:07.940ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
64482023-09-22T23:15:07.940ZINFOcrucible: [1] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active WaitQuorum New ds_transition to Active
64492023-09-22T23:15:07.940ZINFOcrucible: [1] Transition from WaitQuorum to Active
64502023-09-22T23:15:07.940ZINFOcrucible: [2] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active Active New ds_transition to WaitActive
64512023-09-22T23:15:07.940ZINFOcrucible: [2] Transition from New to WaitActive
64522023-09-22T23:15:07.940ZINFOcrucible: [2] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active Active WaitActive ds_transition to WaitQuorum
64532023-09-22T23:15:07.940ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
64542023-09-22T23:15:07.940ZINFOcrucible: [2] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active Active WaitQuorum ds_transition to Active
64552023-09-22T23:15:07.940ZINFOcrucible: [2] Transition from WaitQuorum to Active
64562023-09-22T23:15:07.940ZINFOcrucible: [2] 9c388cd6-fae1-4b08-8053-810eeb837889 (b665010c-5239-4d80-9110-d7705153361f) Active Active Active ds_transition to Faulted
64572023-09-22T23:15:07.940ZINFOcrucible: [2] Transition from Active to Faulted
64582023-09-22T23:15:07.940ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
64592023-09-22T23:15:07.940ZERROcrucible: [1] Reports error GenericError("bad") on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
64602023-09-22T23:15:07.940ZINFOcrucible: All done
6461 test test::up_test::write_unwritten_fail_and_skip ... ok
64622023-09-22T23:15:07.941ZINFOcrucible: Crucible stats registered with UUID: 20675499-ba9b-4cc2-afb6-7b2326fde76c
64632023-09-22T23:15:07.941ZINFOcrucible: Crucible 20675499-ba9b-4cc2-afb6-7b2326fde76c has session id: f2aa5184-d49b-48c4-8cb3-d685de3e6c52
64642023-09-22T23:15:07.941ZINFOcrucible: 20675499-ba9b-4cc2-afb6-7b2326fde76c is now active with session: acddad9f-857e-4f36-a46d-d669897fa456
64652023-09-22T23:15:07.941ZINFOcrucible: [0] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) New New New ds_transition to WaitActive
64662023-09-22T23:15:07.941ZINFOcrucible: [0] Transition from New to WaitActive
64672023-09-22T23:15:07.941ZINFOcrucible: [0] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) WaitActive New New ds_transition to WaitQuorum
64682023-09-22T23:15:07.941ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
64692023-09-22T23:15:07.941ZINFOcrucible: [0] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) WaitQuorum New New ds_transition to Active
64702023-09-22T23:15:07.941ZINFOcrucible: [0] Transition from WaitQuorum to Active
64712023-09-22T23:15:07.941ZINFOcrucible: [1] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active New New ds_transition to WaitActive
64722023-09-22T23:15:07.941ZINFOcrucible: [1] Transition from New to WaitActive
64732023-09-22T23:15:07.941ZINFOcrucible: [1] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active WaitActive New ds_transition to WaitQuorum
64742023-09-22T23:15:07.941ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
64752023-09-22T23:15:07.941ZINFOcrucible: [1] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active WaitQuorum New ds_transition to Active
64762023-09-22T23:15:07.941ZINFOcrucible: [1] Transition from WaitQuorum to Active
64772023-09-22T23:15:07.941ZINFOcrucible: [2] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active Active New ds_transition to WaitActive
64782023-09-22T23:15:07.941ZINFOcrucible: [2] Transition from New to WaitActive
64792023-09-22T23:15:07.941ZINFOcrucible: [2] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active Active WaitActive ds_transition to WaitQuorum
64802023-09-22T23:15:07.941ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
64812023-09-22T23:15:07.941ZINFOcrucible: [2] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active Active WaitQuorum ds_transition to Active
64822023-09-22T23:15:07.941ZINFOcrucible: [2] Transition from WaitQuorum to Active
64832023-09-22T23:15:07.941ZINFOcrucible: [1] 20675499-ba9b-4cc2-afb6-7b2326fde76c (acddad9f-857e-4f36-a46d-d669897fa456) Active Active Active ds_transition to Faulted
64842023-09-22T23:15:07.941ZINFOcrucible: [1] Transition from Active to Faulted
6485 test test::up_test::write_unwritten_single_skip ... ok
6486 test test_buffer_len ... ok
6487 test test_buffer_len_after_clone ... ok
6488 test test_buffer_len_index_overflow - should panic ... ok
6489 test test_buffer_len_over_block_size ... ok
6490 test test_return_iops ... ok
6491 test volume::test::construct_file_block_io ... ok
6492 test volume::test::test_affected_subvolumes ... ok
6493 test volume::test::test_correct_blocks_returned_multiple_subvolumes_1 ... ok
6494 test volume::test::test_correct_blocks_returned_multiple_subvolumes_2 ... ok
6495 test volume::test::test_correct_blocks_returned_multiple_subvolumes_3 ... ok
6496 test volume::test::test_correct_blocks_returned_one_subvolume ... ok
6497 test volume::test::test_correct_blocks_returned_three_subvolumes ... ok
6498 test volume::test::test_drop_then_recreate_test ... ok
6499 test volume::test::test_in_memory_block_io ... ok
6500 test volume::test::test_no_read_only_parent_for_lba_range ... ok
6501 test volume::test::test_out_of_bounds ... ok
65022023-09-22T23:15:08.580ZINFOcrucible: responded to ping downstairs = 1
65032023-09-22T23:15:08.582ZINFOcrucible: responded to ping downstairs = 1
65042023-09-22T23:15:08.594ZINFOcrucible: Waiting for 4 jobs (currently 3)
65052023-09-22T23:15:08.594ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
65062023-09-22T23:15:08.721ZINFOcrucible: Finally, move the ReOpen job forward
65072023-09-22T23:15:08.721ZINFOcrucible: Now ACK the reopen job
65082023-09-22T23:15:08.721ZWARNcrucible: RE:0 Bailing with error
65092023-09-22T23:15:08.721ZINFOcrucible: err:1 or:0
65102023-09-22T23:15:08.721ZINFOcrucible: Crucible stats registered with UUID: 8918c699-f710-46f0-9eef-70ed6bb4c99f
65112023-09-22T23:15:08.722ZINFOcrucible: Crucible 8918c699-f710-46f0-9eef-70ed6bb4c99f has session id: d8fd5279-1bc0-4827-b800-f11efbd0b44a
65122023-09-22T23:15:08.722ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) New New New ds_transition to WaitActive
65132023-09-22T23:15:08.722ZINFOcrucible: [0] Transition from New to WaitActive
65142023-09-22T23:15:08.722ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) WaitActive New New ds_transition to WaitQuorum
65152023-09-22T23:15:08.722ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
65162023-09-22T23:15:08.722ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) WaitQuorum New New ds_transition to Active
65172023-09-22T23:15:08.722ZINFOcrucible: [0] Transition from WaitQuorum to Active
65182023-09-22T23:15:08.722ZINFOcrucible: [1] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active New New ds_transition to WaitActive
65192023-09-22T23:15:08.722ZINFOcrucible: [1] Transition from New to WaitActive
65202023-09-22T23:15:08.722ZINFOcrucible: [1] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active WaitActive New ds_transition to WaitQuorum
65212023-09-22T23:15:08.722ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
65222023-09-22T23:15:08.722ZINFOcrucible: [1] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active WaitQuorum New ds_transition to Active
65232023-09-22T23:15:08.722ZINFOcrucible: [1] Transition from WaitQuorum to Active
65242023-09-22T23:15:08.722ZINFOcrucible: [2] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active Active New ds_transition to WaitActive
65252023-09-22T23:15:08.722ZINFOcrucible: [2] Transition from New to WaitActive
65262023-09-22T23:15:08.722ZINFOcrucible: [2] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active Active WaitActive ds_transition to WaitQuorum
65272023-09-22T23:15:08.722ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
65282023-09-22T23:15:08.722ZINFOcrucible: [2] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active Active WaitQuorum ds_transition to Active
65292023-09-22T23:15:08.722ZINFOcrucible: [2] Transition from WaitQuorum to Active
65302023-09-22T23:15:08.722ZINFOcrucible: 8918c699-f710-46f0-9eef-70ed6bb4c99f is now active with session: a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b
65312023-09-22T23:15:08.722ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Active Active Active ds_transition to Faulted
65322023-09-22T23:15:08.722ZINFOcrucible: [0] Transition from Active to Faulted
65332023-09-22T23:15:08.722ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) Faulted Active Active ds_transition to LiveRepairReady
65342023-09-22T23:15:08.722ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
65352023-09-22T23:15:08.722ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) LiveRepairReady Active Active ds_transition to LiveRepair
65362023-09-22T23:15:08.723ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
65372023-09-22T23:15:08.723ZINFOcrucible: Waiting for Close + ReOpen jobs
65382023-09-22T23:15:08.723ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
65392023-09-22T23:15:08.723ZINFOcrucible: RE:0 close id:1000 queued, notify DS
65402023-09-22T23:15:08.723ZINFOcrucible: RE:0 Wait for result from close command 1000:1
65412023-09-22T23:15:08.725ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
65422023-09-22T23:15:08.725ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
65432023-09-22T23:15:08.725ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
65442023-09-22T23:15:08.725ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
65452023-09-22T23:15:08.725ZINFOcrucible: [0] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Active LiveRepair Active ds_transition to Faulted
65462023-09-22T23:15:08.725ZINFOcrucible: [0] Transition from Active to Faulted
65472023-09-22T23:15:08.725ZINFOcrucible: Now ACK the close job
65482023-09-22T23:15:08.725ZINFOcrucible: Waiting for 3 jobs (currently 2)
65492023-09-22T23:15:08.725ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
65502023-09-22T23:15:08.725ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
65512023-09-22T23:15:08.726ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
65522023-09-22T23:15:08.726ZINFOcrucible: [1] 98c53138-edfb-4bb6-9deb-ec8ddf307957 (9c9d5503-b2e8-4f16-b53b-34eee863d6b8) Faulted LiveRepair Active ds_transition to Faulted
65532023-09-22T23:15:08.726ZINFOcrucible: [1] Transition from LiveRepair to Faulted
65542023-09-22T23:15:08.726ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
65552023-09-22T23:15:08.728ZINFOcrucible: Waiting for 3 jobs (currently 2)
65562023-09-22T23:15:08.728ZINFOcrucible: No repair needed for extent 0 = downstairs
65572023-09-22T23:15:08.728ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
65582023-09-22T23:15:08.853ZINFOcrucible: Now ACK the close job
65592023-09-22T23:15:08.853ZINFOcrucible: Waiting for 3 jobs (currently 2)
65602023-09-22T23:15:08.854ZINFOcrucible: No repair needed for extent 0 = downstairs
65612023-09-22T23:15:08.854ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
6562 test volume::test::test_parent_initialized_read_only_region_one_subvolume ... ok
65632023-09-22T23:15:09.722ZINFOcrucible: Waiting for 3 jobs (currently 2)
65642023-09-22T23:15:09.722ZINFOcrucible: No repair needed for extent 0 = downstairs
65652023-09-22T23:15:09.722ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
65662023-09-22T23:15:09.726ZINFOcrucible: Waiting for 4 jobs (currently 3)
65672023-09-22T23:15:09.726ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
65682023-09-22T23:15:09.726ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
65692023-09-22T23:15:09.729ZINFOcrucible: Waiting for 4 jobs (currently 3)
65702023-09-22T23:15:09.729ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
65712023-09-22T23:15:09.855ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
65722023-09-22T23:15:09.855ZERROcrucible: [2] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
65732023-09-22T23:15:09.855ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
65742023-09-22T23:15:09.855ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
65752023-09-22T23:15:09.855ZINFOcrucible: [2] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) LiveRepair Active Active ds_transition to Faulted
65762023-09-22T23:15:09.855ZINFOcrucible: [2] Transition from Active to Faulted
65772023-09-22T23:15:09.855ZINFOcrucible: Waiting for 4 jobs (currently 3)
65782023-09-22T23:15:09.855ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
65792023-09-22T23:15:09.855ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
65802023-09-22T23:15:09.855ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
65812023-09-22T23:15:09.855ZINFOcrucible: [0] 36f6e1bc-5305-47e3-91bd-fe997b9223d6 (98b45c36-052b-4d2c-8209-f6db7df5fa0e) LiveRepair Active Faulted ds_transition to Faulted
65822023-09-22T23:15:09.855ZINFOcrucible: [0] Transition from LiveRepair to Faulted
65832023-09-22T23:15:09.855ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
65842023-09-22T23:15:10.723ZINFOcrucible: Waiting for 4 jobs (currently 3)
65852023-09-22T23:15:10.724ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
65862023-09-22T23:15:10.727ZINFOcrucible: Now move the NoOp job forward
65872023-09-22T23:15:10.727ZINFOcrucible: Now ACK the NoOp job
65882023-09-22T23:15:10.727ZINFOcrucible: Finally, move the ReOpen job forward
65892023-09-22T23:15:10.727ZINFOcrucible: Now ACK the Reopen job
65902023-09-22T23:15:10.727ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
65912023-09-22T23:15:10.727ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
65922023-09-22T23:15:10.727ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
65932023-09-22T23:15:10.727ZWARNcrucible: RE:0 Bailing with error
6594 ----------------------------------------------------------------
6595 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
6596 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
6597 1 Acked 1000 FClose 0 Err Done Done false
6598 2 Acked 1001 NoOp 0 Skip Skip Done false
6599 3 Acked 1002 NoOp 0 Skip Skip Done false
6600 4 Acked 1003 Reopen 0 Skip Skip Done false
6601 STATES DS:0 DS:1 DS:2 TOTAL
6602 New 0 0 0 0
6603 Sent 0 0 0 0
6604 Done 0 1 4 5
6605 Skipped 3 3 0 6
6606 Error 1 0 0 1
6607 Last Flush: 0 0 0
6608 Downstairs last five completed:
6609 Upstairs last five completed: 4 3 2 1
66102023-09-22T23:15:10.728ZINFOcrucible: Crucible stats registered with UUID: 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1
66112023-09-22T23:15:10.728ZINFOcrucible: Crucible 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 has session id: 6e2c3b77-caac-4366-908f-9b395378044a
66122023-09-22T23:15:10.728ZINFOcrucible: [0] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) New New New ds_transition to WaitActive
66132023-09-22T23:15:10.728ZINFOcrucible: [0] Transition from New to WaitActive
66142023-09-22T23:15:10.728ZINFOcrucible: [0] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) WaitActive New New ds_transition to WaitQuorum
66152023-09-22T23:15:10.728ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
66162023-09-22T23:15:10.728ZINFOcrucible: [0] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) WaitQuorum New New ds_transition to Active
66172023-09-22T23:15:10.728ZINFOcrucible: [0] Transition from WaitQuorum to Active
66182023-09-22T23:15:10.728ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active New New ds_transition to WaitActive
66192023-09-22T23:15:10.728ZINFOcrucible: [1] Transition from New to WaitActive
66202023-09-22T23:15:10.728ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active WaitActive New ds_transition to WaitQuorum
66212023-09-22T23:15:10.728ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
66222023-09-22T23:15:10.728ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active WaitQuorum New ds_transition to Active
66232023-09-22T23:15:10.728ZINFOcrucible: [1] Transition from WaitQuorum to Active
66242023-09-22T23:15:10.728ZINFOcrucible: [2] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active Active New ds_transition to WaitActive
66252023-09-22T23:15:10.728ZINFOcrucible: [2] Transition from New to WaitActive
66262023-09-22T23:15:10.728ZINFOcrucible: [2] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active Active WaitActive ds_transition to WaitQuorum
66272023-09-22T23:15:10.728ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
66282023-09-22T23:15:10.728ZINFOcrucible: [2] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active Active WaitQuorum ds_transition to Active
66292023-09-22T23:15:10.728ZINFOcrucible: [2] Transition from WaitQuorum to Active
66302023-09-22T23:15:10.728ZINFOcrucible: 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 is now active with session: d294a151-6695-434f-afd6-fb1516966e23
66312023-09-22T23:15:10.729ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active Active Active ds_transition to Faulted
66322023-09-22T23:15:10.729ZINFOcrucible: [1] Transition from Active to Faulted
66332023-09-22T23:15:10.729ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active Faulted Active ds_transition to LiveRepairReady
66342023-09-22T23:15:10.729ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
66352023-09-22T23:15:10.729ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active LiveRepairReady Active ds_transition to LiveRepair
66362023-09-22T23:15:10.729ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
66372023-09-22T23:15:10.729ZINFOcrucible: Waiting for Close + ReOpen jobs
66382023-09-22T23:15:10.729ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
66392023-09-22T23:15:10.729ZINFOcrucible: RE:0 close id:1000 queued, notify DS
66402023-09-22T23:15:10.729ZINFOcrucible: RE:0 Wait for result from close command 1000:1
66412023-09-22T23:15:10.731ZINFOcrucible: Now move the NoOp job forward
66422023-09-22T23:15:10.731ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
66432023-09-22T23:15:10.731ZERROcrucible: [0] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
66442023-09-22T23:15:10.731ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
66452023-09-22T23:15:10.731ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
66462023-09-22T23:15:10.731ZINFOcrucible: [0] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Active LiveRepair Active ds_transition to Faulted
66472023-09-22T23:15:10.731ZINFOcrucible: [0] Transition from Active to Faulted
66482023-09-22T23:15:10.731ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
66492023-09-22T23:15:10.731ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
66502023-09-22T23:15:10.731ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
66512023-09-22T23:15:10.731ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
66522023-09-22T23:15:10.731ZINFOcrucible: [1] 73140564-9a43-4886-9974-6360c31b2d26 (521e1392-21bb-441e-9084-601140ec5551) Faulted LiveRepair Active ds_transition to Faulted
66532023-09-22T23:15:10.731ZINFOcrucible: [1] Transition from LiveRepair to Faulted
66542023-09-22T23:15:10.731ZWARNcrucible: RE:0 Bailing with error
66552023-09-22T23:15:10.732ZINFOcrucible: Crucible stats registered with UUID: e2730307-dcf6-4644-b256-08168ac114e1
66562023-09-22T23:15:10.732ZINFOcrucible: Crucible e2730307-dcf6-4644-b256-08168ac114e1 has session id: 5d134bb6-5135-4266-a695-1be78e146b86
66572023-09-22T23:15:10.732ZINFOcrucible: [0] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) New New New ds_transition to WaitActive
66582023-09-22T23:15:10.732ZINFOcrucible: [0] Transition from New to WaitActive
66592023-09-22T23:15:10.732ZINFOcrucible: [0] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) WaitActive New New ds_transition to WaitQuorum
66602023-09-22T23:15:10.732ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
66612023-09-22T23:15:10.732ZINFOcrucible: [0] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) WaitQuorum New New ds_transition to Active
66622023-09-22T23:15:10.732ZINFOcrucible: [0] Transition from WaitQuorum to Active
66632023-09-22T23:15:10.732ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active New New ds_transition to WaitActive
66642023-09-22T23:15:10.732ZINFOcrucible: [1] Transition from New to WaitActive
66652023-09-22T23:15:10.732ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active WaitActive New ds_transition to WaitQuorum
66662023-09-22T23:15:10.732ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
66672023-09-22T23:15:10.732ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active WaitQuorum New ds_transition to Active
66682023-09-22T23:15:10.732ZINFOcrucible: [1] Transition from WaitQuorum to Active
66692023-09-22T23:15:10.732ZINFOcrucible: [2] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active Active New ds_transition to WaitActive
66702023-09-22T23:15:10.732ZINFOcrucible: [2] Transition from New to WaitActive
66712023-09-22T23:15:10.732ZINFOcrucible: [2] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active Active WaitActive ds_transition to WaitQuorum
66722023-09-22T23:15:10.732ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
66732023-09-22T23:15:10.732ZINFOcrucible: [2] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active Active WaitQuorum ds_transition to Active
66742023-09-22T23:15:10.732ZINFOcrucible: [2] Transition from WaitQuorum to Active
66752023-09-22T23:15:10.732ZINFOcrucible: e2730307-dcf6-4644-b256-08168ac114e1 is now active with session: 8d751494-4fad-49f3-9d80-50dcfbbc3677
66762023-09-22T23:15:10.732ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active Active Active ds_transition to Faulted
66772023-09-22T23:15:10.732ZINFOcrucible: [1] Transition from Active to Faulted
66782023-09-22T23:15:10.732ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active Faulted Active ds_transition to LiveRepairReady
66792023-09-22T23:15:10.732ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
66802023-09-22T23:15:10.732ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active LiveRepairReady Active ds_transition to LiveRepair
66812023-09-22T23:15:10.732ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
66822023-09-22T23:15:10.733ZINFOcrucible: Waiting for Close + ReOpen jobs
66832023-09-22T23:15:10.733ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
66842023-09-22T23:15:10.733ZINFOcrucible: RE:0 close id:1000 queued, notify DS
66852023-09-22T23:15:10.733ZINFOcrucible: RE:0 Wait for result from close command 1000:1
66862023-09-22T23:15:10.856ZINFOcrucible: Now move the NoOp job forward
66872023-09-22T23:15:10.856ZINFOcrucible: Now ACK the NoOp job
66882023-09-22T23:15:10.856ZINFOcrucible: Finally, move the ReOpen job forward
66892023-09-22T23:15:10.857ZINFOcrucible: Now ACK the Reopen job
66902023-09-22T23:15:10.857ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
66912023-09-22T23:15:10.857ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
66922023-09-22T23:15:10.857ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
66932023-09-22T23:15:10.857ZWARNcrucible: RE:0 Bailing with error
66942023-09-22T23:15:10.857ZINFOcrucible: Crucible stats registered with UUID: b256d998-7399-488e-9e7c-fec98f8e0755
66952023-09-22T23:15:10.857ZINFOcrucible: Crucible b256d998-7399-488e-9e7c-fec98f8e0755 has session id: eca80382-6af4-4870-9732-7b0243eee46e
66962023-09-22T23:15:10.857ZINFOcrucible: [0] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) New New New ds_transition to WaitActive
66972023-09-22T23:15:10.857ZINFOcrucible: [0] Transition from New to WaitActive
66982023-09-22T23:15:10.857ZINFOcrucible: [0] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) WaitActive New New ds_transition to WaitQuorum
66992023-09-22T23:15:10.857ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
67002023-09-22T23:15:10.857ZINFOcrucible: [0] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) WaitQuorum New New ds_transition to Active
67012023-09-22T23:15:10.857ZINFOcrucible: [0] Transition from WaitQuorum to Active
67022023-09-22T23:15:10.857ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active New New ds_transition to WaitActive
67032023-09-22T23:15:10.857ZINFOcrucible: [1] Transition from New to WaitActive
67042023-09-22T23:15:10.857ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active WaitActive New ds_transition to WaitQuorum
67052023-09-22T23:15:10.857ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
67062023-09-22T23:15:10.857ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active WaitQuorum New ds_transition to Active
67072023-09-22T23:15:10.858ZINFOcrucible: [1] Transition from WaitQuorum to Active
67082023-09-22T23:15:10.858ZINFOcrucible: [2] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active Active New ds_transition to WaitActive
67092023-09-22T23:15:10.858ZINFOcrucible: [2] Transition from New to WaitActive
67102023-09-22T23:15:10.858ZINFOcrucible: [2] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active Active WaitActive ds_transition to WaitQuorum
67112023-09-22T23:15:10.858ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
67122023-09-22T23:15:10.858ZINFOcrucible: [2] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active Active WaitQuorum ds_transition to Active
67132023-09-22T23:15:10.858ZINFOcrucible: [2] Transition from WaitQuorum to Active
67142023-09-22T23:15:10.858ZINFOcrucible: b256d998-7399-488e-9e7c-fec98f8e0755 is now active with session: 87e8b462-0c21-434d-84d7-49817a811b80
67152023-09-22T23:15:10.858ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active Active Active ds_transition to Faulted
67162023-09-22T23:15:10.858ZINFOcrucible: [1] Transition from Active to Faulted
67172023-09-22T23:15:10.858ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active Faulted Active ds_transition to LiveRepairReady
67182023-09-22T23:15:10.858ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
67192023-09-22T23:15:10.858ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active LiveRepairReady Active ds_transition to LiveRepair
67202023-09-22T23:15:10.858ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
67212023-09-22T23:15:10.858ZINFOcrucible: Waiting for Close + ReOpen jobs
67222023-09-22T23:15:10.858ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
67232023-09-22T23:15:10.858ZINFOcrucible: RE:0 close id:1000 queued, notify DS
67242023-09-22T23:15:10.858ZINFOcrucible: RE:0 Wait for result from close command 1000:1
6725 test volume::test::test_parent_initialized_read_only_region_with_multiple_sub_volumes_1 ... ok
6726 ----------------------------------------------------------------
6727 Crucible gen:0 GIO:true work queues: Upstairs:2 downstairs:4
6728 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
6729 1 Acked 1000 FClose 0 Done Done Done false
6730 2 Acked 1001 NoOp 0 Done Done Done false
6731 3 NotAcked 1002 NoOp 0 New New New false
6732 4 NotAcked 1003 Reopen 0 New New New false
6733 STATES DS:0 DS:1 DS:2 TOTAL
6734 New 2 2 2 6
6735 Sent 0 0 0 0
6736 Done 2 2 2 6
6737 Skipped 0 0 0 0
6738 Error 0 0 0 0
6739 Last Flush: 0 0 0
6740 Downstairs last five completed:
6741 Upstairs last five completed: 2 1
67422023-09-22T23:15:11.595ZINFOcrucible: Now move the NoOp job forward
67432023-09-22T23:15:11.595ZINFOcrucible: Finally, move the ReOpen job forward
67442023-09-22T23:15:11.595ZINFOcrucible: Now ACK the reopen job
67452023-09-22T23:15:11.595ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
67462023-09-22T23:15:11.595ZINFOcrucible: jobs are: 4
67472023-09-22T23:15:11.596ZINFOcrucible: Crucible stats registered with UUID: 68feb627-ebe6-44ef-be69-9d655f75db90
67482023-09-22T23:15:11.596ZINFOcrucible: Crucible 68feb627-ebe6-44ef-be69-9d655f75db90 has session id: acbf6d56-1701-497b-bbc7-f335c3a54a62
67492023-09-22T23:15:11.596ZINFOcrucible: [0] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) New New New ds_transition to WaitActive
67502023-09-22T23:15:11.596ZINFOcrucible: [0] Transition from New to WaitActive
67512023-09-22T23:15:11.596ZINFOcrucible: [0] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) WaitActive New New ds_transition to WaitQuorum
67522023-09-22T23:15:11.596ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
67532023-09-22T23:15:11.596ZINFOcrucible: [0] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) WaitQuorum New New ds_transition to Active
67542023-09-22T23:15:11.596ZINFOcrucible: [0] Transition from WaitQuorum to Active
67552023-09-22T23:15:11.596ZINFOcrucible: [1] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active New New ds_transition to WaitActive
67562023-09-22T23:15:11.596ZINFOcrucible: [1] Transition from New to WaitActive
67572023-09-22T23:15:11.596ZINFOcrucible: [1] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active WaitActive New ds_transition to WaitQuorum
67582023-09-22T23:15:11.596ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
67592023-09-22T23:15:11.596ZINFOcrucible: [1] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active WaitQuorum New ds_transition to Active
67602023-09-22T23:15:11.596ZINFOcrucible: [1] Transition from WaitQuorum to Active
67612023-09-22T23:15:11.597ZINFOcrucible: [2] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active Active New ds_transition to WaitActive
67622023-09-22T23:15:11.597ZINFOcrucible: [2] Transition from New to WaitActive
67632023-09-22T23:15:11.597ZINFOcrucible: [2] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active Active WaitActive ds_transition to WaitQuorum
67642023-09-22T23:15:11.597ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
67652023-09-22T23:15:11.597ZINFOcrucible: [2] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active Active WaitQuorum ds_transition to Active
67662023-09-22T23:15:11.597ZINFOcrucible: [2] Transition from WaitQuorum to Active
67672023-09-22T23:15:11.597ZINFOcrucible: 68feb627-ebe6-44ef-be69-9d655f75db90 is now active with session: 64856612-cf41-4003-9ffd-c17c761cd49c
67682023-09-22T23:15:11.597ZINFOcrucible: [2] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active Active Active ds_transition to Faulted
67692023-09-22T23:15:11.597ZINFOcrucible: [2] Transition from Active to Faulted
67702023-09-22T23:15:11.597ZINFOcrucible: [2] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active Active Faulted ds_transition to LiveRepairReady
67712023-09-22T23:15:11.597ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
67722023-09-22T23:15:11.597ZINFOcrucible: [2] 68feb627-ebe6-44ef-be69-9d655f75db90 (64856612-cf41-4003-9ffd-c17c761cd49c) Active Active LiveRepairReady ds_transition to LiveRepair
67732023-09-22T23:15:11.597ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
67742023-09-22T23:15:11.597ZINFOcrucible: Waiting for Close + ReOpen jobs
67752023-09-22T23:15:11.597ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
67762023-09-22T23:15:11.597ZINFOcrucible: RE:0 close id:1000 queued, notify DS
67772023-09-22T23:15:11.597ZINFOcrucible: RE:0 Wait for result from close command 1000:1
67782023-09-22T23:15:11.725ZINFOcrucible: Now move the NoOp job forward
67792023-09-22T23:15:11.725ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
67802023-09-22T23:15:11.725ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
67812023-09-22T23:15:11.725ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
67822023-09-22T23:15:11.725ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
67832023-09-22T23:15:11.725ZINFOcrucible: [2] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) LiveRepair Active Active ds_transition to Faulted
67842023-09-22T23:15:11.725ZINFOcrucible: [2] Transition from Active to Faulted
67852023-09-22T23:15:11.725ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
67862023-09-22T23:15:11.725ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
67872023-09-22T23:15:11.725ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
67882023-09-22T23:15:11.725ZINFOcrucible: [0] 8918c699-f710-46f0-9eef-70ed6bb4c99f (a0a1b590-9fa5-47d1-aa6b-921e6ac1a59b) LiveRepair Active Faulted ds_transition to Faulted
67892023-09-22T23:15:11.725ZINFOcrucible: [0] Transition from LiveRepair to Faulted
67902023-09-22T23:15:11.725ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
67912023-09-22T23:15:11.729ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
67922023-09-22T23:15:11.730ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
67932023-09-22T23:15:11.730ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
67942023-09-22T23:15:11.730ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
67952023-09-22T23:15:11.730ZINFOcrucible: [1] 8bffad4b-ac22-4ea5-a8bb-6ce70b6d15e1 (d294a151-6695-434f-afd6-fb1516966e23) Active LiveRepair Active ds_transition to Faulted
67962023-09-22T23:15:11.730ZINFOcrucible: [1] Transition from LiveRepair to Faulted
67972023-09-22T23:15:11.730ZINFOcrucible: Now ACK the close job
67982023-09-22T23:15:11.730ZINFOcrucible: Waiting for 3 jobs (currently 2)
67992023-09-22T23:15:11.730ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
68002023-09-22T23:15:11.730ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
68012023-09-22T23:15:11.732ZINFOcrucible: Waiting for 3 jobs (currently 2)
68022023-09-22T23:15:11.732ZINFOcrucible: No repair needed for extent 0 = downstairs
68032023-09-22T23:15:11.732ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
68042023-09-22T23:15:11.859ZINFOcrucible: Now ACK the close job
68052023-09-22T23:15:11.859ZINFOcrucible: Waiting for 3 jobs (currently 2)
68062023-09-22T23:15:11.859ZINFOcrucible: No repair needed for extent 0 = downstairs
68072023-09-22T23:15:11.859ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
6808 test volume::test::test_parent_initialized_read_only_region_with_multiple_sub_volumes_2 ... ok
6809 test volume::test::test_parent_uninitialized_read_only_region_one_subvolume ... ok
6810 test volume::test::test_parent_uninitialized_read_only_region_with_multiple_sub_volumes_1 ... ok
6811 test volume::test::test_parent_uninitialized_read_only_region_with_multiple_sub_volumes_2 ... ok
6812 test volume::test::test_read_only_parent_for_lba_range ... ok
6813 block 0 < scrub_point 0. Check with your parent
6814 block 1 < scrub_point 0. Check with your parent
6815 block 2 < scrub_point 0. Check with your parent
6816 block 3 < scrub_point 0. Check with your parent
6817 block 4 < scrub_point 0. Check with your parent
6818 block 5 < scrub_point 0. Check with your parent
6819 block 6 < scrub_point 0. Check with your parent
6820 block 7 < scrub_point 0. Check with your parent
6821 block 8 < scrub_point 0. Check with your parent
6822 block 9 < scrub_point 0. Check with your parent
6823 block 0+1 <= scrub_point 1. No parent check
6824 block 1 < scrub_point 1. Check with your parent
6825 block 2 < scrub_point 1. Check with your parent
6826 block 3 < scrub_point 1. Check with your parent
6827 block 4 < scrub_point 1. Check with your parent
6828 block 5 < scrub_point 1. Check with your parent
6829 block 6 < scrub_point 1. Check with your parent
6830 block 7 < scrub_point 1. Check with your parent
6831 block 8 < scrub_point 1. Check with your parent
6832 block 9 < scrub_point 1. Check with your parent
6833 block 0+1 <= scrub_point 2. No parent check
6834 block 1+1 <= scrub_point 2. No parent check
6835 block 2 < scrub_point 2. Check with your parent
6836 block 3 < scrub_point 2. Check with your parent
6837 block 4 < scrub_point 2. Check with your parent
6838 block 5 < scrub_point 2. Check with your parent
6839 block 6 < scrub_point 2. Check with your parent
6840 block 7 < scrub_point 2. Check with your parent
6841 block 8 < scrub_point 2. Check with your parent
6842 block 9 < scrub_point 2. Check with your parent
6843 block 0+1 <= scrub_point 3. No parent check
6844 block 1+1 <= scrub_point 3. No parent check
6845 block 2+1 <= scrub_point 3. No parent check
6846 block 3 < scrub_point 3. Check with your parent
6847 block 4 < scrub_point 3. Check with your parent
6848 block 5 < scrub_point 3. Check with your parent
6849 block 6 < scrub_point 3. Check with your parent
6850 block 7 < scrub_point 3. Check with your parent
6851 block 8 < scrub_point 3. Check with your parent
6852 block 9 < scrub_point 3. Check with your parent
6853 block 0+1 <= scrub_point 4. No parent check
6854 block 1+1 <= scrub_point 4. No parent check
6855 block 2+1 <= scrub_point 4. No parent check
6856 block 3+1 <= scrub_point 4. No parent check
6857 block 4 < scrub_point 4. Check with your parent
6858 block 5 < scrub_point 4. Check with your parent
6859 block 6 < scrub_point 4. Check with your parent
6860 block 7 < scrub_point 4. Check with your parent
6861 block 8 < scrub_point 4. Check with your parent
6862 block 9 < scrub_point 4. Check with your parent
6863 block 0+1 <= scrub_point 5. No parent check
6864 block 1+1 <= scrub_point 5. No parent check
6865 block 2+1 <= scrub_point 5. No parent check
6866 block 3+1 <= scrub_point 5. No parent check
6867 block 4+1 <= scrub_point 5. No parent check
6868 block 5 < scrub_point 5. Check with your parent
6869 block 6 < scrub_point 5. Check with your parent
6870 block 7 < scrub_point 5. Check with your parent
6871 block 8 < scrub_point 5. Check with your parent
6872 block 9 < scrub_point 5. Check with your parent
6873 block 0+1 <= scrub_point 6. No parent check
6874 block 1+1 <= scrub_point 6. No parent check
6875 block 2+1 <= scrub_point 6. No parent check
6876 block 3+1 <= scrub_point 6. No parent check
6877 block 4+1 <= scrub_point 6. No parent check
6878 block 5+1 <= scrub_point 6. No parent check
6879 block 6 < scrub_point 6. Check with your parent
6880 block 7 < scrub_point 6. Check with your parent
6881 block 8 < scrub_point 6. Check with your parent
6882 block 9 < scrub_point 6. Check with your parent
6883 block 0+1 <= scrub_point 7. No parent check
6884 block 1+1 <= scrub_point 7. No parent check
6885 block 2+1 <= scrub_point 7. No parent check
6886 block 3+1 <= scrub_point 7. No parent check
6887 block 4+1 <= scrub_point 7. No parent check
6888 block 5+1 <= scrub_point 7. No parent check
6889 block 6+1 <= scrub_point 7. No parent check
6890 block 7 < scrub_point 7. Check with your parent
6891 block 8 < scrub_point 7. Check with your parent
6892 block 9 < scrub_point 7. Check with your parent
6893 block 0+1 <= scrub_point 8. No parent check
6894 block 1+1 <= scrub_point 8. No parent check
6895 block 2+1 <= scrub_point 8. No parent check
6896 block 3+1 <= scrub_point 8. No parent check
6897 block 4+1 <= scrub_point 8. No parent check
6898 block 5+1 <= scrub_point 8. No parent check
6899 block 6+1 <= scrub_point 8. No parent check
6900 block 7+1 <= scrub_point 8. No parent check
6901 block 8 < scrub_point 8. Check with your parent
6902 block 9 < scrub_point 8. Check with your parent
6903 block 0+1 <= scrub_point 9. No parent check
6904 block 1+1 <= scrub_point 9. No parent check
6905 block 2+1 <= scrub_point 9. No parent check
6906 block 3+1 <= scrub_point 9. No parent check
6907 block 4+1 <= scrub_point 9. No parent check
6908 block 5+1 <= scrub_point 9. No parent check
6909 block 6+1 <= scrub_point 9. No parent check
6910 block 7+1 <= scrub_point 9. No parent check
6911 block 8+1 <= scrub_point 9. No parent check
6912 block 9 < scrub_point 9. Check with your parent
6913 block 0+1 <= scrub_point 10. No parent check
6914 block 1+1 <= scrub_point 10. No parent check
6915 block 2+1 <= scrub_point 10. No parent check
6916 block 3+1 <= scrub_point 10. No parent check
6917 block 4+1 <= scrub_point 10. No parent check
6918 block 5+1 <= scrub_point 10. No parent check
6919 block 6+1 <= scrub_point 10. No parent check
6920 block 7+1 <= scrub_point 10. No parent check
6921 block 8+1 <= scrub_point 10. No parent check
6922 block 9+1 <= scrub_point 10. No parent check
6923 block 0 < scrub_point 0. Check with your parent
6924 block 1 < scrub_point 0. Check with your parent
6925 block 2 < scrub_point 0. Check with your parent
6926 block 3 < scrub_point 0. Check with your parent
6927 block 4 < scrub_point 0. Check with your parent
6928 block 5 < scrub_point 0. Check with your parent
6929 block 6 < scrub_point 0. Check with your parent
6930 block 7 < scrub_point 0. Check with your parent
6931 block 8 < scrub_point 0. Check with your parent
6932 block 0 < scrub_point 1. Check with your parent
6933 block 1 < scrub_point 1. Check with your parent
6934 block 2 < scrub_point 1. Check with your parent
6935 block 3 < scrub_point 1. Check with your parent
6936 block 4 < scrub_point 1. Check with your parent
6937 block 5 < scrub_point 1. Check with your parent
6938 block 6 < scrub_point 1. Check with your parent
6939 block 7 < scrub_point 1. Check with your parent
6940 block 8 < scrub_point 1. Check with your parent
6941 block 0+2 <= scrub_point 2. No parent check
6942 block 1 < scrub_point 2. Check with your parent
6943 block 2 < scrub_point 2. Check with your parent
6944 block 3 < scrub_point 2. Check with your parent
6945 block 4 < scrub_point 2. Check with your parent
6946 block 5 < scrub_point 2. Check with your parent
6947 block 6 < scrub_point 2. Check with your parent
6948 block 7 < scrub_point 2. Check with your parent
6949 block 8 < scrub_point 2. Check with your parent
6950 block 0+2 <= scrub_point 3. No parent check
6951 block 1+2 <= scrub_point 3. No parent check
6952 block 2 < scrub_point 3. Check with your parent
6953 block 3 < scrub_point 3. Check with your parent
6954 block 4 < scrub_point 3. Check with your parent
6955 block 5 < scrub_point 3. Check with your parent
6956 block 6 < scrub_point 3. Check with your parent
6957 block 7 < scrub_point 3. Check with your parent
6958 block 8 < scrub_point 3. Check with your parent
6959 block 0+2 <= scrub_point 4. No parent check
6960 block 1+2 <= scrub_point 4. No parent check
6961 block 2+2 <= scrub_point 4. No parent check
6962 block 3 < scrub_point 4. Check with your parent
6963 block 4 < scrub_point 4. Check with your parent
6964 block 5 < scrub_point 4. Check with your parent
6965 block 6 < scrub_point 4. Check with your parent
6966 block 7 < scrub_point 4. Check with your parent
6967 block 8 < scrub_point 4. Check with your parent
6968 block 0+2 <= scrub_point 5. No parent check
6969 block 1+2 <= scrub_point 5. No parent check
6970 block 2+2 <= scrub_point 5. No parent check
6971 block 3+2 <= scrub_point 5. No parent check
6972 block 4 < scrub_point 5. Check with your parent
6973 block 5 < scrub_point 5. Check with your parent
6974 block 6 < scrub_point 5. Check with your parent
6975 block 7 < scrub_point 5. Check with your parent
6976 block 8 < scrub_point 5. Check with your parent
6977 block 0+2 <= scrub_point 6. No parent check
6978 block 1+2 <= scrub_point 6. No parent check
6979 block 2+2 <= scrub_point 6. No parent check
6980 block 3+2 <= scrub_point 6. No parent check
6981 block 4+2 <= scrub_point 6. No parent check
6982 block 5 < scrub_point 6. Check with your parent
6983 block 6 < scrub_point 6. Check with your parent
6984 block 7 < scrub_point 6. Check with your parent
6985 block 8 < scrub_point 6. Check with your parent
6986 block 0+2 <= scrub_point 7. No parent check
6987 block 1+2 <= scrub_point 7. No parent check
6988 block 2+2 <= scrub_point 7. No parent check
6989 block 3+2 <= scrub_point 7. No parent check
6990 block 4+2 <= scrub_point 7. No parent check
6991 block 5+2 <= scrub_point 7. No parent check
6992 block 6 < scrub_point 7. Check with your parent
6993 block 7 < scrub_point 7. Check with your parent
6994 block 8 < scrub_point 7. Check with your parent
6995 block 0+2 <= scrub_point 8. No parent check
6996 block 1+2 <= scrub_point 8. No parent check
6997 block 2+2 <= scrub_point 8. No parent check
6998 block 3+2 <= scrub_point 8. No parent check
6999 block 4+2 <= scrub_point 8. No parent check
7000 block 5+2 <= scrub_point 8. No parent check
7001 block 6+2 <= scrub_point 8. No parent check
7002 block 7 < scrub_point 8. Check with your parent
7003 block 8 < scrub_point 8. Check with your parent
7004 block 0+2 <= scrub_point 9. No parent check
7005 block 1+2 <= scrub_point 9. No parent check
7006 block 2+2 <= scrub_point 9. No parent check
7007 block 3+2 <= scrub_point 9. No parent check
7008 block 4+2 <= scrub_point 9. No parent check
7009 block 5+2 <= scrub_point 9. No parent check
7010 block 6+2 <= scrub_point 9. No parent check
7011 block 7+2 <= scrub_point 9. No parent check
7012 block 8 < scrub_point 9. Check with your parent
7013 block 0+2 <= scrub_point 10. No parent check
7014 block 1+2 <= scrub_point 10. No parent check
7015 block 2+2 <= scrub_point 10. No parent check
7016 block 3+2 <= scrub_point 10. No parent check
7017 block 4+2 <= scrub_point 10. No parent check
7018 block 5+2 <= scrub_point 10. No parent check
7019 block 6+2 <= scrub_point 10. No parent check
7020 block 7+2 <= scrub_point 10. No parent check
7021 block 8+2 <= scrub_point 10. No parent check
7022 block 0 < scrub_point 0. Check with your parent
7023 block 1 < scrub_point 0. Check with your parent
7024 block 2 < scrub_point 0. Check with your parent
7025 block 3 < scrub_point 0. Check with your parent
7026 block 4 < scrub_point 0. Check with your parent
7027 block 5 < scrub_point 0. Check with your parent
7028 block 6 < scrub_point 0. Check with your parent
7029 block 7 < scrub_point 0. Check with your parent
7030 block 0 < scrub_point 1. Check with your parent
7031 block 1 < scrub_point 1. Check with your parent
7032 block 2 < scrub_point 1. Check with your parent
7033 block 3 < scrub_point 1. Check with your parent
7034 block 4 < scrub_point 1. Check with your parent
7035 block 5 < scrub_point 1. Check with your parent
7036 block 6 < scrub_point 1. Check with your parent
7037 block 7 < scrub_point 1. Check with your parent
7038 block 0 < scrub_point 2. Check with your parent
7039 block 1 < scrub_point 2. Check with your parent
7040 block 2 < scrub_point 2. Check with your parent
7041 block 3 < scrub_point 2. Check with your parent
7042 block 4 < scrub_point 2. Check with your parent
7043 block 5 < scrub_point 2. Check with your parent
7044 block 6 < scrub_point 2. Check with your parent
7045 block 7 < scrub_point 2. Check with your parent
7046 block 0+3 <= scrub_point 3. No parent check
7047 block 1 < scrub_point 3. Check with your parent
7048 block 2 < scrub_point 3. Check with your parent
7049 block 3 < scrub_point 3. Check with your parent
7050 block 4 < scrub_point 3. Check with your parent
7051 block 5 < scrub_point 3. Check with your parent
7052 block 6 < scrub_point 3. Check with your parent
7053 block 7 < scrub_point 3. Check with your parent
7054 block 0+3 <= scrub_point 4. No parent check
7055 block 1+3 <= scrub_point 4. No parent check
7056 block 2 < scrub_point 4. Check with your parent
7057 block 3 < scrub_point 4. Check with your parent
7058 block 4 < scrub_point 4. Check with your parent
7059 block 5 < scrub_point 4. Check with your parent
7060 block 6 < scrub_point 4. Check with your parent
7061 block 7 < scrub_point 4. Check with your parent
7062 block 0+3 <= scrub_point 5. No parent check
7063 block 1+3 <= scrub_point 5. No parent check
7064 block 2+3 <= scrub_point 5. No parent check
7065 block 3 < scrub_point 5. Check with your parent
7066 block 4 < scrub_point 5. Check with your parent
7067 block 5 < scrub_point 5. Check with your parent
7068 block 6 < scrub_point 5. Check with your parent
7069 block 7 < scrub_point 5. Check with your parent
7070 block 0+3 <= scrub_point 6. No parent check
7071 block 1+3 <= scrub_point 6. No parent check
7072 block 2+3 <= scrub_point 6. No parent check
7073 block 3+3 <= scrub_point 6. No parent check
7074 block 4 < scrub_point 6. Check with your parent
7075 block 5 < scrub_point 6. Check with your parent
7076 block 6 < scrub_point 6. Check with your parent
7077 block 7 < scrub_point 6. Check with your parent
7078 block 0+3 <= scrub_point 7. No parent check
7079 block 1+3 <= scrub_point 7. No parent check
7080 block 2+3 <= scrub_point 7. No parent check
7081 block 3+3 <= scrub_point 7. No parent check
7082 block 4+3 <= scrub_point 7. No parent check
7083 block 5 < scrub_point 7. Check with your parent
7084 block 6 < scrub_point 7. Check with your parent
7085 block 7 < scrub_point 7. Check with your parent
7086 block 0+3 <= scrub_point 8. No parent check
7087 block 1+3 <= scrub_point 8. No parent check
7088 block 2+3 <= scrub_point 8. No parent check
7089 block 3+3 <= scrub_point 8. No parent check
7090 block 4+3 <= scrub_point 8. No parent check
7091 block 5+3 <= scrub_point 8. No parent check
7092 block 6 < scrub_point 8. Check with your parent
7093 block 7 < scrub_point 8. Check with your parent
7094 block 0+3 <= scrub_point 9. No parent check
7095 block 1+3 <= scrub_point 9. No parent check
7096 block 2+3 <= scrub_point 9. No parent check
7097 block 3+3 <= scrub_point 9. No parent check
7098 block 4+3 <= scrub_point 9. No parent check
7099 block 5+3 <= scrub_point 9. No parent check
7100 block 6+3 <= scrub_point 9. No parent check
7101 block 7 < scrub_point 9. Check with your parent
7102 block 0+3 <= scrub_point 10. No parent check
7103 block 1+3 <= scrub_point 10. No parent check
7104 block 2+3 <= scrub_point 10. No parent check
7105 block 3+3 <= scrub_point 10. No parent check
7106 block 4+3 <= scrub_point 10. No parent check
7107 block 5+3 <= scrub_point 10. No parent check
7108 block 6+3 <= scrub_point 10. No parent check
7109 block 7+3 <= scrub_point 10. No parent check
7110 block 0 < scrub_point 0. Check with your parent
7111 block 1 < scrub_point 0. Check with your parent
7112 block 2 < scrub_point 0. Check with your parent
7113 block 3 < scrub_point 0. Check with your parent
7114 block 4 < scrub_point 0. Check with your parent
7115 block 5 < scrub_point 0. Check with your parent
7116 block 6 < scrub_point 0. Check with your parent
7117 block 0 < scrub_point 1. Check with your parent
7118 block 1 < scrub_point 1. Check with your parent
7119 block 2 < scrub_point 1. Check with your parent
7120 block 3 < scrub_point 1. Check with your parent
7121 block 4 < scrub_point 1. Check with your parent
7122 block 5 < scrub_point 1. Check with your parent
7123 block 6 < scrub_point 1. Check with your parent
7124 block 0 < scrub_point 2. Check with your parent
7125 block 1 < scrub_point 2. Check with your parent
7126 block 2 < scrub_point 2. Check with your parent
7127 block 3 < scrub_point 2. Check with your parent
7128 block 4 < scrub_point 2. Check with your parent
7129 block 5 < scrub_point 2. Check with your parent
7130 block 6 < scrub_point 2. Check with your parent
7131 block 0 < scrub_point 3. Check with your parent
7132 block 1 < scrub_point 3. Check with your parent
7133 block 2 < scrub_point 3. Check with your parent
7134 block 3 < scrub_point 3. Check with your parent
7135 block 4 < scrub_point 3. Check with your parent
7136 block 5 < scrub_point 3. Check with your parent
7137 block 6 < scrub_point 3. Check with your parent
7138 block 0+4 <= scrub_point 4. No parent check
7139 block 1 < scrub_point 4. Check with your parent
7140 block 2 < scrub_point 4. Check with your parent
7141 block 3 < scrub_point 4. Check with your parent
7142 block 4 < scrub_point 4. Check with your parent
7143 block 5 < scrub_point 4. Check with your parent
7144 block 6 < scrub_point 4. Check with your parent
7145 block 0+4 <= scrub_point 5. No parent check
7146 block 1+4 <= scrub_point 5. No parent check
7147 block 2 < scrub_point 5. Check with your parent
7148 block 3 < scrub_point 5. Check with your parent
7149 block 4 < scrub_point 5. Check with your parent
7150 block 5 < scrub_point 5. Check with your parent
7151 block 6 < scrub_point 5. Check with your parent
7152 block 0+4 <= scrub_point 6. No parent check
7153 block 1+4 <= scrub_point 6. No parent check
7154 block 2+4 <= scrub_point 6. No parent check
7155 block 3 < scrub_point 6. Check with your parent
7156 block 4 < scrub_point 6. Check with your parent
7157 block 5 < scrub_point 6. Check with your parent
7158 block 6 < scrub_point 6. Check with your parent
7159 block 0+4 <= scrub_point 7. No parent check
7160 block 1+4 <= scrub_point 7. No parent check
7161 block 2+4 <= scrub_point 7. No parent check
7162 block 3+4 <= scrub_point 7. No parent check
7163 block 4 < scrub_point 7. Check with your parent
7164 block 5 < scrub_point 7. Check with your parent
7165 block 6 < scrub_point 7. Check with your parent
7166 block 0+4 <= scrub_point 8. No parent check
7167 block 1+4 <= scrub_point 8. No parent check
7168 block 2+4 <= scrub_point 8. No parent check
7169 block 3+4 <= scrub_point 8. No parent check
7170 block 4+4 <= scrub_point 8. No parent check
7171 block 5 < scrub_point 8. Check with your parent
7172 block 6 < scrub_point 8. Check with your parent
7173 block 0+4 <= scrub_point 9. No parent check
7174 block 1+4 <= scrub_point 9. No parent check
7175 block 2+4 <= scrub_point 9. No parent check
7176 block 3+4 <= scrub_point 9. No parent check
7177 block 4+4 <= scrub_point 9. No parent check
7178 block 5+4 <= scrub_point 9. No parent check
7179 block 6 < scrub_point 9. Check with your parent
7180 block 0+4 <= scrub_point 10. No parent check
7181 block 1+4 <= scrub_point 10. No parent check
7182 block 2+4 <= scrub_point 10. No parent check
7183 block 3+4 <= scrub_point 10. No parent check
7184 block 4+4 <= scrub_point 10. No parent check
7185 block 5+4 <= scrub_point 10. No parent check
7186 block 6+4 <= scrub_point 10. No parent check
7187 block 0 < scrub_point 0. Check with your parent
7188 block 1 < scrub_point 0. Check with your parent
7189 block 2 < scrub_point 0. Check with your parent
7190 block 3 < scrub_point 0. Check with your parent
7191 block 4 < scrub_point 0. Check with your parent
7192 block 5 < scrub_point 0. Check with your parent
7193 block 0 < scrub_point 1. Check with your parent
7194 block 1 < scrub_point 1. Check with your parent
7195 block 2 < scrub_point 1. Check with your parent
7196 block 3 < scrub_point 1. Check with your parent
7197 block 4 < scrub_point 1. Check with your parent
7198 block 5 < scrub_point 1. Check with your parent
7199 block 0 < scrub_point 2. Check with your parent
7200 block 1 < scrub_point 2. Check with your parent
7201 block 2 < scrub_point 2. Check with your parent
7202 block 3 < scrub_point 2. Check with your parent
7203 block 4 < scrub_point 2. Check with your parent
7204 block 5 < scrub_point 2. Check with your parent
7205 block 0 < scrub_point 3. Check with your parent
7206 block 1 < scrub_point 3. Check with your parent
7207 block 2 < scrub_point 3. Check with your parent
7208 block 3 < scrub_point 3. Check with your parent
7209 block 4 < scrub_point 3. Check with your parent
7210 block 5 < scrub_point 3. Check with your parent
7211 block 0 < scrub_point 4. Check with your parent
7212 block 1 < scrub_point 4. Check with your parent
7213 block 2 < scrub_point 4. Check with your parent
7214 block 3 < scrub_point 4. Check with your parent
7215 block 4 < scrub_point 4. Check with your parent
7216 block 5 < scrub_point 4. Check with your parent
7217 block 0+5 <= scrub_point 5. No parent check
7218 block 1 < scrub_point 5. Check with your parent
7219 block 2 < scrub_point 5. Check with your parent
7220 block 3 < scrub_point 5. Check with your parent
7221 block 4 < scrub_point 5. Check with your parent
7222 block 5 < scrub_point 5. Check with your parent
7223 block 0+5 <= scrub_point 6. No parent check
7224 block 1+5 <= scrub_point 6. No parent check
7225 block 2 < scrub_point 6. Check with your parent
7226 block 3 < scrub_point 6. Check with your parent
7227 block 4 < scrub_point 6. Check with your parent
7228 block 5 < scrub_point 6. Check with your parent
7229 block 0+5 <= scrub_point 7. No parent check
7230 block 1+5 <= scrub_point 7. No parent check
7231 block 2+5 <= scrub_point 7. No parent check
7232 block 3 < scrub_point 7. Check with your parent
7233 block 4 < scrub_point 7. Check with your parent
7234 block 5 < scrub_point 7. Check with your parent
7235 block 0+5 <= scrub_point 8. No parent check
7236 block 1+5 <= scrub_point 8. No parent check
7237 block 2+5 <= scrub_point 8. No parent check
7238 block 3+5 <= scrub_point 8. No parent check
7239 block 4 < scrub_point 8. Check with your parent
7240 block 5 < scrub_point 8. Check with your parent
7241 block 0+5 <= scrub_point 9. No parent check
7242 block 1+5 <= scrub_point 9. No parent check
7243 block 2+5 <= scrub_point 9. No parent check
7244 block 3+5 <= scrub_point 9. No parent check
7245 block 4+5 <= scrub_point 9. No parent check
7246 block 5 < scrub_point 9. Check with your parent
7247 block 0+5 <= scrub_point 10. No parent check
7248 block 1+5 <= scrub_point 10. No parent check
7249 block 2+5 <= scrub_point 10. No parent check
7250 block 3+5 <= scrub_point 10. No parent check
7251 block 4+5 <= scrub_point 10. No parent check
7252 block 5+5 <= scrub_point 10. No parent check
7253 block 0 < scrub_point 0. Check with your parent
7254 block 1 < scrub_point 0. Check with your parent
7255 block 2 < scrub_point 0. Check with your parent
7256 block 3 < scrub_point 0. Check with your parent
7257 block 4 < scrub_point 0. Check with your parent
7258 block 0 < scrub_point 1. Check with your parent
7259 block 1 < scrub_point 1. Check with your parent
7260 block 2 < scrub_point 1. Check with your parent
7261 block 3 < scrub_point 1. Check with your parent
7262 block 4 < scrub_point 1. Check with your parent
7263 block 0 < scrub_point 2. Check with your parent
7264 block 1 < scrub_point 2. Check with your parent
7265 block 2 < scrub_point 2. Check with your parent
7266 block 3 < scrub_point 2. Check with your parent
7267 block 4 < scrub_point 2. Check with your parent
7268 block 0 < scrub_point 3. Check with your parent
7269 block 1 < scrub_point 3. Check with your parent
7270 block 2 < scrub_point 3. Check with your parent
7271 block 3 < scrub_point 3. Check with your parent
7272 block 4 < scrub_point 3. Check with your parent
7273 block 0 < scrub_point 4. Check with your parent
7274 block 1 < scrub_point 4. Check with your parent
7275 block 2 < scrub_point 4. Check with your parent
7276 block 3 < scrub_point 4. Check with your parent
7277 block 4 < scrub_point 4. Check with your parent
7278 block 0 < scrub_point 5. Check with your parent
7279 block 1 < scrub_point 5. Check with your parent
7280 block 2 < scrub_point 5. Check with your parent
7281 block 3 < scrub_point 5. Check with your parent
7282 block 4 < scrub_point 5. Check with your parent
7283 block 0+6 <= scrub_point 6. No parent check
7284 block 1 < scrub_point 6. Check with your parent
7285 block 2 < scrub_point 6. Check with your parent
7286 block 3 < scrub_point 6. Check with your parent
7287 block 4 < scrub_point 6. Check with your parent
7288 block 0+6 <= scrub_point 7. No parent check
7289 block 1+6 <= scrub_point 7. No parent check
7290 block 2 < scrub_point 7. Check with your parent
7291 block 3 < scrub_point 7. Check with your parent
7292 block 4 < scrub_point 7. Check with your parent
7293 block 0+6 <= scrub_point 8. No parent check
7294 block 1+6 <= scrub_point 8. No parent check
7295 block 2+6 <= scrub_point 8. No parent check
7296 block 3 < scrub_point 8. Check with your parent
7297 block 4 < scrub_point 8. Check with your parent
7298 block 0+6 <= scrub_point 9. No parent check
7299 block 1+6 <= scrub_point 9. No parent check
7300 block 2+6 <= scrub_point 9. No parent check
7301 block 3+6 <= scrub_point 9. No parent check
7302 block 4 < scrub_point 9. Check with your parent
7303 block 0+6 <= scrub_point 10. No parent check
7304 block 1+6 <= scrub_point 10. No parent check
7305 block 2+6 <= scrub_point 10. No parent check
7306 block 3+6 <= scrub_point 10. No parent check
7307 block 4+6 <= scrub_point 10. No parent check
7308 block 0 < scrub_point 0. Check with your parent
7309 block 1 < scrub_point 0. Check with your parent
7310 block 2 < scrub_point 0. Check with your parent
7311 block 3 < scrub_point 0. Check with your parent
7312 block 0 < scrub_point 1. Check with your parent
7313 block 1 < scrub_point 1. Check with your parent
7314 block 2 < scrub_point 1. Check with your parent
7315 block 3 < scrub_point 1. Check with your parent
7316 block 0 < scrub_point 2. Check with your parent
7317 block 1 < scrub_point 2. Check with your parent
7318 block 2 < scrub_point 2. Check with your parent
7319 block 3 < scrub_point 2. Check with your parent
7320 block 0 < scrub_point 3. Check with your parent
7321 block 1 < scrub_point 3. Check with your parent
7322 block 2 < scrub_point 3. Check with your parent
7323 block 3 < scrub_point 3. Check with your parent
7324 block 0 < scrub_point 4. Check with your parent
7325 block 1 < scrub_point 4. Check with your parent
7326 block 2 < scrub_point 4. Check with your parent
7327 block 3 < scrub_point 4. Check with your parent
7328 block 0 < scrub_point 5. Check with your parent
7329 block 1 < scrub_point 5. Check with your parent
7330 block 2 < scrub_point 5. Check with your parent
7331 block 3 < scrub_point 5. Check with your parent
7332 block 0 < scrub_point 6. Check with your parent
7333 block 1 < scrub_point 6. Check with your parent
7334 block 2 < scrub_point 6. Check with your parent
7335 block 3 < scrub_point 6. Check with your parent
7336 block 0+7 <= scrub_point 7. No parent check
7337 block 1 < scrub_point 7. Check with your parent
7338 block 2 < scrub_point 7. Check with your parent
7339 block 3 < scrub_point 7. Check with your parent
7340 block 0+7 <= scrub_point 8. No parent check
7341 block 1+7 <= scrub_point 8. No parent check
7342 block 2 < scrub_point 8. Check with your parent
7343 block 3 < scrub_point 8. Check with your parent
7344 block 0+7 <= scrub_point 9. No parent check
7345 block 1+7 <= scrub_point 9. No parent check
7346 block 2+7 <= scrub_point 9. No parent check
7347 block 3 < scrub_point 9. Check with your parent
7348 block 0+7 <= scrub_point 10. No parent check
7349 block 1+7 <= scrub_point 10. No parent check
7350 block 2+7 <= scrub_point 10. No parent check
7351 block 3+7 <= scrub_point 10. No parent check
7352 block 0 < scrub_point 0. Check with your parent
7353 block 1 < scrub_point 0. Check with your parent
7354 block 2 < scrub_point 0. Check with your parent
7355 block 0 < scrub_point 1. Check with your parent
7356 block 1 < scrub_point 1. Check with your parent
7357 block 2 < scrub_point 1. Check with your parent
7358 block 0 < scrub_point 2. Check with your parent
7359 block 1 < scrub_point 2. Check with your parent
7360 block 2 < scrub_point 2. Check with your parent
7361 block 0 < scrub_point 3. Check with your parent
7362 block 1 < scrub_point 3. Check with your parent
7363 block 2 < scrub_point 3. Check with your parent
7364 block 0 < scrub_point 4. Check with your parent
7365 block 1 < scrub_point 4. Check with your parent
7366 block 2 < scrub_point 4. Check with your parent
7367 block 0 < scrub_point 5. Check with your parent
7368 block 1 < scrub_point 5. Check with your parent
7369 block 2 < scrub_point 5. Check with your parent
7370 block 0 < scrub_point 6. Check with your parent
7371 block 1 < scrub_point 6. Check with your parent
7372 block 2 < scrub_point 6. Check with your parent
7373 block 0 < scrub_point 7. Check with your parent
7374 block 1 < scrub_point 7. Check with your parent
7375 block 2 < scrub_point 7. Check with your parent
7376 block 0+8 <= scrub_point 8. No parent check
7377 block 1 < scrub_point 8. Check with your parent
7378 block 2 < scrub_point 8. Check with your parent
7379 block 0+8 <= scrub_point 9. No parent check
7380 block 1+8 <= scrub_point 9. No parent check
7381 block 2 < scrub_point 9. Check with your parent
7382 block 0+8 <= scrub_point 10. No parent check
7383 block 1+8 <= scrub_point 10. No parent check
7384 block 2+8 <= scrub_point 10. No parent check
7385 block 0 < scrub_point 0. Check with your parent
7386 block 1 < scrub_point 0. Check with your parent
7387 block 0 < scrub_point 1. Check with your parent
7388 block 1 < scrub_point 1. Check with your parent
7389 block 0 < scrub_point 2. Check with your parent
7390 block 1 < scrub_point 2. Check with your parent
7391 block 0 < scrub_point 3. Check with your parent
7392 block 1 < scrub_point 3. Check with your parent
7393 block 0 < scrub_point 4. Check with your parent
7394 block 1 < scrub_point 4. Check with your parent
7395 block 0 < scrub_point 5. Check with your parent
7396 block 1 < scrub_point 5. Check with your parent
7397 block 0 < scrub_point 6. Check with your parent
7398 block 1 < scrub_point 6. Check with your parent
7399 block 0 < scrub_point 7. Check with your parent
7400 block 1 < scrub_point 7. Check with your parent
7401 block 0 < scrub_point 8. Check with your parent
7402 block 1 < scrub_point 8. Check with your parent
7403 block 0+9 <= scrub_point 9. No parent check
7404 block 1 < scrub_point 9. Check with your parent
7405 block 0+9 <= scrub_point 10. No parent check
7406 block 1+9 <= scrub_point 10. No parent check
7407 test volume::test::test_scrub_point_subvolume_equal ... ok
7408 block 0 < scrub_point 0. Check with your parent
7409 block 1 < scrub_point 0. Check with your parent
7410 block 2 < scrub_point 0. Check with your parent
7411 block 3 < scrub_point 0. Check with your parent
7412 block 4 < scrub_point 0. Check with your parent
7413 block 5 > parent 5. Go to SubVolume
7414 block 6 > parent 5. Go to SubVolume
7415 block 7 > parent 5. Go to SubVolume
7416 block 8 > parent 5. Go to SubVolume
7417 block 9 > parent 5. Go to SubVolume
7418 block 0+1 <= scrub_point 1. No parent check
7419 block 1 < scrub_point 1. Check with your parent
7420 block 2 < scrub_point 1. Check with your parent
7421 block 3 < scrub_point 1. Check with your parent
7422 block 4 < scrub_point 1. Check with your parent
7423 block 5 > parent 5. Go to SubVolume
7424 block 6 > parent 5. Go to SubVolume
7425 block 7 > parent 5. Go to SubVolume
7426 block 8 > parent 5. Go to SubVolume
7427 block 9 > parent 5. Go to SubVolume
7428 block 0+1 <= scrub_point 2. No parent check
7429 block 1+1 <= scrub_point 2. No parent check
7430 block 2 < scrub_point 2. Check with your parent
7431 block 3 < scrub_point 2. Check with your parent
7432 block 4 < scrub_point 2. Check with your parent
7433 block 5 > parent 5. Go to SubVolume
7434 block 6 > parent 5. Go to SubVolume
7435 block 7 > parent 5. Go to SubVolume
7436 block 8 > parent 5. Go to SubVolume
7437 block 9 > parent 5. Go to SubVolume
7438 block 0+1 <= scrub_point 3. No parent check
7439 block 1+1 <= scrub_point 3. No parent check
7440 block 2+1 <= scrub_point 3. No parent check
7441 block 3 < scrub_point 3. Check with your parent
7442 block 4 < scrub_point 3. Check with your parent
7443 block 5 > parent 5. Go to SubVolume
7444 block 6 > parent 5. Go to SubVolume
7445 block 7 > parent 5. Go to SubVolume
7446 block 8 > parent 5. Go to SubVolume
7447 block 9 > parent 5. Go to SubVolume
7448 block 0+1 <= scrub_point 4. No parent check
7449 block 1+1 <= scrub_point 4. No parent check
7450 block 2+1 <= scrub_point 4. No parent check
7451 block 3+1 <= scrub_point 4. No parent check
7452 block 4 < scrub_point 4. Check with your parent
7453 block 5 > parent 5. Go to SubVolume
7454 block 6 > parent 5. Go to SubVolume
7455 block 7 > parent 5. Go to SubVolume
7456 block 8 > parent 5. Go to SubVolume
7457 block 9 > parent 5. Go to SubVolume
7458 block 0+1 <= scrub_point 5. No parent check
7459 block 1+1 <= scrub_point 5. No parent check
7460 block 2+1 <= scrub_point 5. No parent check
7461 block 3+1 <= scrub_point 5. No parent check
7462 block 4+1 <= scrub_point 5. No parent check
7463 block 5 > parent 5. Go to SubVolume
7464 block 6 > parent 5. Go to SubVolume
7465 block 7 > parent 5. Go to SubVolume
7466 block 8 > parent 5. Go to SubVolume
7467 block 9 > parent 5. Go to SubVolume
7468 block 0 < scrub_point 0. Check with your parent
7469 block 1 < scrub_point 0. Check with your parent
7470 block 2 < scrub_point 0. Check with your parent
7471 block 3 < scrub_point 0. Check with your parent
7472 block 4 < scrub_point 0. Check with your parent
7473 block 5 > parent 5. Go to SubVolume
7474 block 6 > parent 5. Go to SubVolume
7475 block 7 > parent 5. Go to SubVolume
7476 block 8 > parent 5. Go to SubVolume
7477 block 0 < scrub_point 1. Check with your parent
7478 block 1 < scrub_point 1. Check with your parent
7479 block 2 < scrub_point 1. Check with your parent
7480 block 3 < scrub_point 1. Check with your parent
7481 block 4 < scrub_point 1. Check with your parent
7482 block 5 > parent 5. Go to SubVolume
7483 block 6 > parent 5. Go to SubVolume
7484 block 7 > parent 5. Go to SubVolume
7485 block 8 > parent 5. Go to SubVolume
7486 block 0+2 <= scrub_point 2. No parent check
7487 block 1 < scrub_point 2. Check with your parent
7488 block 2 < scrub_point 2. Check with your parent
7489 block 3 < scrub_point 2. Check with your parent
7490 block 4 < scrub_point 2. Check with your parent
7491 block 5 > parent 5. Go to SubVolume
7492 block 6 > parent 5. Go to SubVolume
7493 block 7 > parent 5. Go to SubVolume
7494 block 8 > parent 5. Go to SubVolume
7495 block 0+2 <= scrub_point 3. No parent check
7496 block 1+2 <= scrub_point 3. No parent check
7497 block 2 < scrub_point 3. Check with your parent
7498 block 3 < scrub_point 3. Check with your parent
7499 block 4 < scrub_point 3. Check with your parent
7500 block 5 > parent 5. Go to SubVolume
7501 block 6 > parent 5. Go to SubVolume
7502 block 7 > parent 5. Go to SubVolume
7503 block 8 > parent 5. Go to SubVolume
7504 block 0+2 <= scrub_point 4. No parent check
7505 block 1+2 <= scrub_point 4. No parent check
7506 block 2+2 <= scrub_point 4. No parent check
7507 block 3 < scrub_point 4. Check with your parent
7508 block 4 < scrub_point 4. Check with your parent
7509 block 5 > parent 5. Go to SubVolume
7510 block 6 > parent 5. Go to SubVolume
7511 block 7 > parent 5. Go to SubVolume
7512 block 8 > parent 5. Go to SubVolume
7513 block 0+2 <= scrub_point 5. No parent check
7514 block 1+2 <= scrub_point 5. No parent check
7515 block 2+2 <= scrub_point 5. No parent check
7516 block 3+2 <= scrub_point 5. No parent check
7517 block 4 < scrub_point 5. Check with your parent
7518 block 5 > parent 5. Go to SubVolume
7519 block 6 > parent 5. Go to SubVolume
7520 block 7 > parent 5. Go to SubVolume
7521 block 8 > parent 5. Go to SubVolume
7522 block 0 < scrub_point 0. Check with your parent
7523 block 1 < scrub_point 0. Check with your parent
7524 block 2 < scrub_point 0. Check with your parent
7525 block 3 < scrub_point 0. Check with your parent
7526 block 4 < scrub_point 0. Check with your parent
7527 block 5 > parent 5. Go to SubVolume
7528 block 6 > parent 5. Go to SubVolume
7529 block 7 > parent 5. Go to SubVolume
7530 block 0 < scrub_point 1. Check with your parent
7531 block 1 < scrub_point 1. Check with your parent
7532 block 2 < scrub_point 1. Check with your parent
7533 block 3 < scrub_point 1. Check with your parent
7534 block 4 < scrub_point 1. Check with your parent
7535 block 5 > parent 5. Go to SubVolume
7536 block 6 > parent 5. Go to SubVolume
7537 block 7 > parent 5. Go to SubVolume
7538 block 0 < scrub_point 2. Check with your parent
7539 block 1 < scrub_point 2. Check with your parent
7540 block 2 < scrub_point 2. Check with your parent
7541 block 3 < scrub_point 2. Check with your parent
7542 block 4 < scrub_point 2. Check with your parent
7543 block 5 > parent 5. Go to SubVolume
7544 block 6 > parent 5. Go to SubVolume
7545 block 7 > parent 5. Go to SubVolume
7546 block 0+3 <= scrub_point 3. No parent check
7547 block 1 < scrub_point 3. Check with your parent
7548 block 2 < scrub_point 3. Check with your parent
7549 block 3 < scrub_point 3. Check with your parent
7550 block 4 < scrub_point 3. Check with your parent
7551 block 5 > parent 5. Go to SubVolume
7552 block 6 > parent 5. Go to SubVolume
7553 block 7 > parent 5. Go to SubVolume
7554 block 0+3 <= scrub_point 4. No parent check
7555 block 1+3 <= scrub_point 4. No parent check
7556 block 2 < scrub_point 4. Check with your parent
7557 block 3 < scrub_point 4. Check with your parent
7558 block 4 < scrub_point 4. Check with your parent
7559 block 5 > parent 5. Go to SubVolume
7560 block 6 > parent 5. Go to SubVolume
7561 block 7 > parent 5. Go to SubVolume
7562 block 0+3 <= scrub_point 5. No parent check
7563 block 1+3 <= scrub_point 5. No parent check
7564 block 2+3 <= scrub_point 5. No parent check
7565 block 3 < scrub_point 5. Check with your parent
7566 block 4 < scrub_point 5. Check with your parent
7567 block 5 > parent 5. Go to SubVolume
7568 block 6 > parent 5. Go to SubVolume
7569 block 7 > parent 5. Go to SubVolume
7570 block 0 < scrub_point 0. Check with your parent
7571 block 1 < scrub_point 0. Check with your parent
7572 block 2 < scrub_point 0. Check with your parent
7573 block 3 < scrub_point 0. Check with your parent
7574 block 4 < scrub_point 0. Check with your parent
7575 block 5 > parent 5. Go to SubVolume
7576 block 6 > parent 5. Go to SubVolume
7577 block 0 < scrub_point 1. Check with your parent
7578 block 1 < scrub_point 1. Check with your parent
7579 block 2 < scrub_point 1. Check with your parent
7580 block 3 < scrub_point 1. Check with your parent
7581 block 4 < scrub_point 1. Check with your parent
7582 block 5 > parent 5. Go to SubVolume
7583 block 6 > parent 5. Go to SubVolume
7584 block 0 < scrub_point 2. Check with your parent
7585 block 1 < scrub_point 2. Check with your parent
7586 block 2 < scrub_point 2. Check with your parent
7587 block 3 < scrub_point 2. Check with your parent
7588 block 4 < scrub_point 2. Check with your parent
7589 block 5 > parent 5. Go to SubVolume
7590 block 6 > parent 5. Go to SubVolume
7591 block 0 < scrub_point 3. Check with your parent
7592 block 1 < scrub_point 3. Check with your parent
7593 block 2 < scrub_point 3. Check with your parent
7594 block 3 < scrub_point 3. Check with your parent
7595 block 4 < scrub_point 3. Check with your parent
7596 block 5 > parent 5. Go to SubVolume
7597 block 6 > parent 5. Go to SubVolume
7598 block 0+4 <= scrub_point 4. No parent check
7599 block 1 < scrub_point 4. Check with your parent
7600 block 2 < scrub_point 4. Check with your parent
7601 block 3 < scrub_point 4. Check with your parent
7602 block 4 < scrub_point 4. Check with your parent
7603 block 5 > parent 5. Go to SubVolume
7604 block 6 > parent 5. Go to SubVolume
7605 block 0+4 <= scrub_point 5. No parent check
7606 block 1+4 <= scrub_point 5. No parent check
7607 block 2 < scrub_point 5. Check with your parent
7608 block 3 < scrub_point 5. Check with your parent
7609 block 4 < scrub_point 5. Check with your parent
7610 block 5 > parent 5. Go to SubVolume
7611 block 6 > parent 5. Go to SubVolume
7612 block 0 < scrub_point 0. Check with your parent
7613 block 1 < scrub_point 0. Check with your parent
7614 block 2 < scrub_point 0. Check with your parent
7615 block 3 < scrub_point 0. Check with your parent
7616 block 4 < scrub_point 0. Check with your parent
7617 block 5 > parent 5. Go to SubVolume
7618 block 0 < scrub_point 1. Check with your parent
7619 block 1 < scrub_point 1. Check with your parent
7620 block 2 < scrub_point 1. Check with your parent
7621 block 3 < scrub_point 1. Check with your parent
7622 block 4 < scrub_point 1. Check with your parent
7623 block 5 > parent 5. Go to SubVolume
7624 block 0 < scrub_point 2. Check with your parent
7625 block 1 < scrub_point 2. Check with your parent
7626 block 2 < scrub_point 2. Check with your parent
7627 block 3 < scrub_point 2. Check with your parent
7628 block 4 < scrub_point 2. Check with your parent
7629 block 5 > parent 5. Go to SubVolume
7630 block 0 < scrub_point 3. Check with your parent
7631 block 1 < scrub_point 3. Check with your parent
7632 block 2 < scrub_point 3. Check with your parent
7633 block 3 < scrub_point 3. Check with your parent
7634 block 4 < scrub_point 3. Check with your parent
7635 block 5 > parent 5. Go to SubVolume
7636 block 0 < scrub_point 4. Check with your parent
7637 block 1 < scrub_point 4. Check with your parent
7638 block 2 < scrub_point 4. Check with your parent
7639 block 3 < scrub_point 4. Check with your parent
7640 block 4 < scrub_point 4. Check with your parent
7641 block 5 > parent 5. Go to SubVolume
7642 block 0+5 <= scrub_point 5. No parent check
7643 block 1 < scrub_point 5. Check with your parent
7644 block 2 < scrub_point 5. Check with your parent
7645 block 3 < scrub_point 5. Check with your parent
7646 block 4 < scrub_point 5. Check with your parent
7647 block 5 > parent 5. Go to SubVolume
7648 block 0 < scrub_point 0. Check with your parent
7649 block 1 < scrub_point 0. Check with your parent
7650 block 2 < scrub_point 0. Check with your parent
7651 block 3 < scrub_point 0. Check with your parent
7652 block 4 < scrub_point 0. Check with your parent
7653 block 0 < scrub_point 1. Check with your parent
7654 block 1 < scrub_point 1. Check with your parent
7655 block 2 < scrub_point 1. Check with your parent
7656 block 3 < scrub_point 1. Check with your parent
7657 block 4 < scrub_point 1. Check with your parent
7658 block 0 < scrub_point 2. Check with your parent
7659 block 1 < scrub_point 2. Check with your parent
7660 block 2 < scrub_point 2. Check with your parent
7661 block 3 < scrub_point 2. Check with your parent
7662 block 4 < scrub_point 2. Check with your parent
7663 block 0 < scrub_point 3. Check with your parent
7664 block 1 < scrub_point 3. Check with your parent
7665 block 2 < scrub_point 3. Check with your parent
7666 block 3 < scrub_point 3. Check with your parent
7667 block 4 < scrub_point 3. Check with your parent
7668 block 0 < scrub_point 4. Check with your parent
7669 block 1 < scrub_point 4. Check with your parent
7670 block 2 < scrub_point 4. Check with your parent
7671 block 3 < scrub_point 4. Check with your parent
7672 block 4 < scrub_point 4. Check with your parent
7673 block 0 < scrub_point 5. Check with your parent
7674 block 1 < scrub_point 5. Check with your parent
7675 block 2 < scrub_point 5. Check with your parent
7676 block 3 < scrub_point 5. Check with your parent
7677 block 4 < scrub_point 5. Check with your parent
7678 block 0 < scrub_point 0. Check with your parent
7679 block 1 < scrub_point 0. Check with your parent
7680 block 2 < scrub_point 0. Check with your parent
7681 block 3 < scrub_point 0. Check with your parent
7682 block 0 < scrub_point 1. Check with your parent
7683 block 1 < scrub_point 1. Check with your parent
7684 block 2 < scrub_point 1. Check with your parent
7685 block 3 < scrub_point 1. Check with your parent
7686 block 0 < scrub_point 2. Check with your parent
7687 block 1 < scrub_point 2. Check with your parent
7688 block 2 < scrub_point 2. Check with your parent
7689 block 3 < scrub_point 2. Check with your parent
7690 block 0 < scrub_point 3. Check with your parent
7691 block 1 < scrub_point 3. Check with your parent
7692 block 2 < scrub_point 3. Check with your parent
7693 block 3 < scrub_point 3. Check with your parent
7694 block 0 < scrub_point 4. Check with your parent
7695 block 1 < scrub_point 4. Check with your parent
7696 block 2 < scrub_point 4. Check with your parent
7697 block 3 < scrub_point 4. Check with your parent
7698 block 0 < scrub_point 5. Check with your parent
7699 block 1 < scrub_point 5. Check with your parent
7700 block 2 < scrub_point 5. Check with your parent
7701 block 3 < scrub_point 5. Check with your parent
7702 block 0 < scrub_point 0. Check with your parent
7703 block 1 < scrub_point 0. Check with your parent
7704 block 2 < scrub_point 0. Check with your parent
7705 block 0 < scrub_point 1. Check with your parent
7706 block 1 < scrub_point 1. Check with your parent
7707 block 2 < scrub_point 1. Check with your parent
7708 block 0 < scrub_point 2. Check with your parent
7709 block 1 < scrub_point 2. Check with your parent
7710 block 2 < scrub_point 2. Check with your parent
7711 block 0 < scrub_point 3. Check with your parent
7712 block 1 < scrub_point 3. Check with your parent
7713 block 2 < scrub_point 3. Check with your parent
7714 block 0 < scrub_point 4. Check with your parent
7715 block 1 < scrub_point 4. Check with your parent
7716 block 2 < scrub_point 4. Check with your parent
7717 block 0 < scrub_point 5. Check with your parent
7718 block 1 < scrub_point 5. Check with your parent
7719 block 2 < scrub_point 5. Check with your parent
7720 block 0 < scrub_point 0. Check with your parent
7721 block 1 < scrub_point 0. Check with your parent
7722 block 0 < scrub_point 1. Check with your parent
7723 block 1 < scrub_point 1. Check with your parent
7724 block 0 < scrub_point 2. Check with your parent
7725 block 1 < scrub_point 2. Check with your parent
7726 block 0 < scrub_point 3. Check with your parent
7727 block 1 < scrub_point 3. Check with your parent
7728 block 0 < scrub_point 4. Check with your parent
7729 block 1 < scrub_point 4. Check with your parent
7730 block 0 < scrub_point 5. Check with your parent
7731 block 1 < scrub_point 5. Check with your parent
7732 test volume::test::test_scrub_point_subvolume_smaller ... ok
7733 block 0 < scrub_point 0. Check with your parent
7734 block 1 < scrub_point 0. Check with your parent
7735 block 2 < scrub_point 0. Check with your parent
7736 block 3 < scrub_point 0. Check with your parent
7737 block 4 < scrub_point 0. Check with your parent
7738 block 5 < scrub_point 0. Check with your parent
7739 block 6 < scrub_point 0. Check with your parent
7740 block 7 < scrub_point 0. Check with your parent
7741 block 8 > parent 8. Go to SubVolume
7742 block 9 > parent 8. Go to SubVolume
7743 block 0+1 <= scrub_point 1. No parent check
7744 block 1 < scrub_point 1. Check with your parent
7745 block 2 < scrub_point 1. Check with your parent
7746 block 3 < scrub_point 1. Check with your parent
7747 block 4 < scrub_point 1. Check with your parent
7748 block 5 < scrub_point 1. Check with your parent
7749 block 6 < scrub_point 1. Check with your parent
7750 block 7 < scrub_point 1. Check with your parent
7751 block 8 > parent 8. Go to SubVolume
7752 block 9 > parent 8. Go to SubVolume
7753 block 0+1 <= scrub_point 2. No parent check
7754 block 1+1 <= scrub_point 2. No parent check
7755 block 2 < scrub_point 2. Check with your parent
7756 block 3 < scrub_point 2. Check with your parent
7757 block 4 < scrub_point 2. Check with your parent
7758 block 5 < scrub_point 2. Check with your parent
7759 block 6 < scrub_point 2. Check with your parent
7760 block 7 < scrub_point 2. Check with your parent
7761 block 8 > parent 8. Go to SubVolume
7762 block 9 > parent 8. Go to SubVolume
7763 block 0+1 <= scrub_point 3. No parent check
7764 block 1+1 <= scrub_point 3. No parent check
7765 block 2+1 <= scrub_point 3. No parent check
7766 block 3 < scrub_point 3. Check with your parent
7767 block 4 < scrub_point 3. Check with your parent
7768 block 5 < scrub_point 3. Check with your parent
7769 block 6 < scrub_point 3. Check with your parent
7770 block 7 < scrub_point 3. Check with your parent
7771 block 8 > parent 8. Go to SubVolume
7772 block 9 > parent 8. Go to SubVolume
7773 block 0+1 <= scrub_point 4. No parent check
7774 block 1+1 <= scrub_point 4. No parent check
7775 block 2+1 <= scrub_point 4. No parent check
7776 block 3+1 <= scrub_point 4. No parent check
7777 block 4 < scrub_point 4. Check with your parent
7778 block 5 < scrub_point 4. Check with your parent
7779 block 6 < scrub_point 4. Check with your parent
7780 block 7 < scrub_point 4. Check with your parent
7781 block 8 > parent 8. Go to SubVolume
7782 block 9 > parent 8. Go to SubVolume
7783 block 0+1 <= scrub_point 5. No parent check
7784 block 1+1 <= scrub_point 5. No parent check
7785 block 2+1 <= scrub_point 5. No parent check
7786 block 3+1 <= scrub_point 5. No parent check
7787 block 4+1 <= scrub_point 5. No parent check
7788 block 5 < scrub_point 5. Check with your parent
7789 block 6 < scrub_point 5. Check with your parent
7790 block 7 < scrub_point 5. Check with your parent
7791 block 8 > parent 8. Go to SubVolume
7792 block 9 > parent 8. Go to SubVolume
7793 block 0+1 <= scrub_point 6. No parent check
7794 block 1+1 <= scrub_point 6. No parent check
7795 block 2+1 <= scrub_point 6. No parent check
7796 block 3+1 <= scrub_point 6. No parent check
7797 block 4+1 <= scrub_point 6. No parent check
7798 block 5+1 <= scrub_point 6. No parent check
7799 block 6 < scrub_point 6. Check with your parent
7800 block 7 < scrub_point 6. Check with your parent
7801 block 8 > parent 8. Go to SubVolume
7802 block 9 > parent 8. Go to SubVolume
7803 block 0+1 <= scrub_point 7. No parent check
7804 block 1+1 <= scrub_point 7. No parent check
7805 block 2+1 <= scrub_point 7. No parent check
7806 block 3+1 <= scrub_point 7. No parent check
7807 block 4+1 <= scrub_point 7. No parent check
7808 block 5+1 <= scrub_point 7. No parent check
7809 block 6+1 <= scrub_point 7. No parent check
7810 block 7 < scrub_point 7. Check with your parent
7811 block 8 > parent 8. Go to SubVolume
7812 block 9 > parent 8. Go to SubVolume
7813 block 0+1 <= scrub_point 8. No parent check
7814 block 1+1 <= scrub_point 8. No parent check
7815 block 2+1 <= scrub_point 8. No parent check
7816 block 3+1 <= scrub_point 8. No parent check
7817 block 4+1 <= scrub_point 8. No parent check
7818 block 5+1 <= scrub_point 8. No parent check
7819 block 6+1 <= scrub_point 8. No parent check
7820 block 7+1 <= scrub_point 8. No parent check
7821 block 8 > parent 8. Go to SubVolume
7822 block 9 > parent 8. Go to SubVolume
7823 block 0 < scrub_point 0. Check with your parent
7824 block 1 < scrub_point 0. Check with your parent
7825 block 2 < scrub_point 0. Check with your parent
7826 block 3 < scrub_point 0. Check with your parent
7827 block 4 < scrub_point 0. Check with your parent
7828 block 5 < scrub_point 0. Check with your parent
7829 block 6 < scrub_point 0. Check with your parent
7830 block 7 < scrub_point 0. Check with your parent
7831 block 8 > parent 8. Go to SubVolume
7832 block 0 < scrub_point 1. Check with your parent
7833 block 1 < scrub_point 1. Check with your parent
7834 block 2 < scrub_point 1. Check with your parent
7835 block 3 < scrub_point 1. Check with your parent
7836 block 4 < scrub_point 1. Check with your parent
7837 block 5 < scrub_point 1. Check with your parent
7838 block 6 < scrub_point 1. Check with your parent
7839 block 7 < scrub_point 1. Check with your parent
7840 block 8 > parent 8. Go to SubVolume
7841 block 0+2 <= scrub_point 2. No parent check
7842 block 1 < scrub_point 2. Check with your parent
7843 block 2 < scrub_point 2. Check with your parent
7844 block 3 < scrub_point 2. Check with your parent
7845 block 4 < scrub_point 2. Check with your parent
7846 block 5 < scrub_point 2. Check with your parent
7847 block 6 < scrub_point 2. Check with your parent
7848 block 7 < scrub_point 2. Check with your parent
7849 block 8 > parent 8. Go to SubVolume
7850 block 0+2 <= scrub_point 3. No parent check
7851 block 1+2 <= scrub_point 3. No parent check
7852 block 2 < scrub_point 3. Check with your parent
7853 block 3 < scrub_point 3. Check with your parent
7854 block 4 < scrub_point 3. Check with your parent
7855 block 5 < scrub_point 3. Check with your parent
7856 block 6 < scrub_point 3. Check with your parent
7857 block 7 < scrub_point 3. Check with your parent
7858 block 8 > parent 8. Go to SubVolume
7859 block 0+2 <= scrub_point 4. No parent check
7860 block 1+2 <= scrub_point 4. No parent check
7861 block 2+2 <= scrub_point 4. No parent check
7862 block 3 < scrub_point 4. Check with your parent
7863 block 4 < scrub_point 4. Check with your parent
7864 block 5 < scrub_point 4. Check with your parent
7865 block 6 < scrub_point 4. Check with your parent
7866 block 7 < scrub_point 4. Check with your parent
7867 block 8 > parent 8. Go to SubVolume
7868 block 0+2 <= scrub_point 5. No parent check
7869 block 1+2 <= scrub_point 5. No parent check
7870 block 2+2 <= scrub_point 5. No parent check
7871 block 3+2 <= scrub_point 5. No parent check
7872 block 4 < scrub_point 5. Check with your parent
7873 block 5 < scrub_point 5. Check with your parent
7874 block 6 < scrub_point 5. Check with your parent
7875 block 7 < scrub_point 5. Check with your parent
7876 block 8 > parent 8. Go to SubVolume
7877 block 0+2 <= scrub_point 6. No parent check
7878 block 1+2 <= scrub_point 6. No parent check
7879 block 2+2 <= scrub_point 6. No parent check
7880 block 3+2 <= scrub_point 6. No parent check
7881 block 4+2 <= scrub_point 6. No parent check
7882 block 5 < scrub_point 6. Check with your parent
7883 block 6 < scrub_point 6. Check with your parent
7884 block 7 < scrub_point 6. Check with your parent
7885 block 8 > parent 8. Go to SubVolume
7886 block 0+2 <= scrub_point 7. No parent check
7887 block 1+2 <= scrub_point 7. No parent check
7888 block 2+2 <= scrub_point 7. No parent check
7889 block 3+2 <= scrub_point 7. No parent check
7890 block 4+2 <= scrub_point 7. No parent check
7891 block 5+2 <= scrub_point 7. No parent check
7892 block 6 < scrub_point 7. Check with your parent
7893 block 7 < scrub_point 7. Check with your parent
7894 block 8 > parent 8. Go to SubVolume
7895 block 0+2 <= scrub_point 8. No parent check
7896 block 1+2 <= scrub_point 8. No parent check
7897 block 2+2 <= scrub_point 8. No parent check
7898 block 3+2 <= scrub_point 8. No parent check
7899 block 4+2 <= scrub_point 8. No parent check
7900 block 5+2 <= scrub_point 8. No parent check
7901 block 6+2 <= scrub_point 8. No parent check
7902 block 7 < scrub_point 8. Check with your parent
7903 block 8 > parent 8. Go to SubVolume
7904 block 0 < scrub_point 0. Check with your parent
7905 block 1 < scrub_point 0. Check with your parent
7906 block 2 < scrub_point 0. Check with your parent
7907 block 3 < scrub_point 0. Check with your parent
7908 block 4 < scrub_point 0. Check with your parent
7909 block 5 < scrub_point 0. Check with your parent
7910 block 6 < scrub_point 0. Check with your parent
7911 block 7 < scrub_point 0. Check with your parent
7912 block 0 < scrub_point 1. Check with your parent
7913 block 1 < scrub_point 1. Check with your parent
7914 block 2 < scrub_point 1. Check with your parent
7915 block 3 < scrub_point 1. Check with your parent
7916 block 4 < scrub_point 1. Check with your parent
7917 block 5 < scrub_point 1. Check with your parent
7918 block 6 < scrub_point 1. Check with your parent
7919 block 7 < scrub_point 1. Check with your parent
7920 block 0 < scrub_point 2. Check with your parent
7921 block 1 < scrub_point 2. Check with your parent
7922 block 2 < scrub_point 2. Check with your parent
7923 block 3 < scrub_point 2. Check with your parent
7924 block 4 < scrub_point 2. Check with your parent
7925 block 5 < scrub_point 2. Check with your parent
7926 block 6 < scrub_point 2. Check with your parent
7927 block 7 < scrub_point 2. Check with your parent
7928 block 0+3 <= scrub_point 3. No parent check
7929 block 1 < scrub_point 3. Check with your parent
7930 block 2 < scrub_point 3. Check with your parent
7931 block 3 < scrub_point 3. Check with your parent
7932 block 4 < scrub_point 3. Check with your parent
7933 block 5 < scrub_point 3. Check with your parent
7934 block 6 < scrub_point 3. Check with your parent
7935 block 7 < scrub_point 3. Check with your parent
7936 block 0+3 <= scrub_point 4. No parent check
7937 block 1+3 <= scrub_point 4. No parent check
7938 block 2 < scrub_point 4. Check with your parent
7939 block 3 < scrub_point 4. Check with your parent
7940 block 4 < scrub_point 4. Check with your parent
7941 block 5 < scrub_point 4. Check with your parent
7942 block 6 < scrub_point 4. Check with your parent
7943 block 7 < scrub_point 4. Check with your parent
7944 block 0+3 <= scrub_point 5. No parent check
7945 block 1+3 <= scrub_point 5. No parent check
7946 block 2+3 <= scrub_point 5. No parent check
7947 block 3 < scrub_point 5. Check with your parent
7948 block 4 < scrub_point 5. Check with your parent
7949 block 5 < scrub_point 5. Check with your parent
7950 block 6 < scrub_point 5. Check with your parent
7951 block 7 < scrub_point 5. Check with your parent
7952 block 0+3 <= scrub_point 6. No parent check
7953 block 1+3 <= scrub_point 6. No parent check
7954 block 2+3 <= scrub_point 6. No parent check
7955 block 3+3 <= scrub_point 6. No parent check
7956 block 4 < scrub_point 6. Check with your parent
7957 block 5 < scrub_point 6. Check with your parent
7958 block 6 < scrub_point 6. Check with your parent
7959 block 7 < scrub_point 6. Check with your parent
7960 block 0+3 <= scrub_point 7. No parent check
7961 block 1+3 <= scrub_point 7. No parent check
7962 block 2+3 <= scrub_point 7. No parent check
7963 block 3+3 <= scrub_point 7. No parent check
7964 block 4+3 <= scrub_point 7. No parent check
7965 block 5 < scrub_point 7. Check with your parent
7966 block 6 < scrub_point 7. Check with your parent
7967 block 7 < scrub_point 7. Check with your parent
7968 block 0+3 <= scrub_point 8. No parent check
7969 block 1+3 <= scrub_point 8. No parent check
7970 block 2+3 <= scrub_point 8. No parent check
7971 block 3+3 <= scrub_point 8. No parent check
7972 block 4+3 <= scrub_point 8. No parent check
7973 block 5+3 <= scrub_point 8. No parent check
7974 block 6 < scrub_point 8. Check with your parent
7975 block 7 < scrub_point 8. Check with your parent
7976 block 0 < scrub_point 0. Check with your parent
7977 block 1 < scrub_point 0. Check with your parent
7978 block 2 < scrub_point 0. Check with your parent
7979 block 3 < scrub_point 0. Check with your parent
7980 block 4 < scrub_point 0. Check with your parent
7981 block 5 < scrub_point 0. Check with your parent
7982 block 6 < scrub_point 0. Check with your parent
7983 block 0 < scrub_point 1. Check with your parent
7984 block 1 < scrub_point 1. Check with your parent
7985 block 2 < scrub_point 1. Check with your parent
7986 block 3 < scrub_point 1. Check with your parent
7987 block 4 < scrub_point 1. Check with your parent
7988 block 5 < scrub_point 1. Check with your parent
7989 block 6 < scrub_point 1. Check with your parent
7990 block 0 < scrub_point 2. Check with your parent
7991 block 1 < scrub_point 2. Check with your parent
7992 block 2 < scrub_point 2. Check with your parent
7993 block 3 < scrub_point 2. Check with your parent
7994 block 4 < scrub_point 2. Check with your parent
7995 block 5 < scrub_point 2. Check with your parent
7996 block 6 < scrub_point 2. Check with your parent
7997 block 0 < scrub_point 3. Check with your parent
7998 block 1 < scrub_point 3. Check with your parent
7999 block 2 < scrub_point 3. Check with your parent
8000 block 3 < scrub_point 3. Check with your parent
8001 block 4 < scrub_point 3. Check with your parent
8002 block 5 < scrub_point 3. Check with your parent
8003 block 6 < scrub_point 3. Check with your parent
8004 block 0+4 <= scrub_point 4. No parent check
8005 block 1 < scrub_point 4. Check with your parent
8006 block 2 < scrub_point 4. Check with your parent
8007 block 3 < scrub_point 4. Check with your parent
8008 block 4 < scrub_point 4. Check with your parent
8009 block 5 < scrub_point 4. Check with your parent
8010 block 6 < scrub_point 4. Check with your parent
8011 block 0+4 <= scrub_point 5. No parent check
8012 block 1+4 <= scrub_point 5. No parent check
8013 block 2 < scrub_point 5. Check with your parent
8014 block 3 < scrub_point 5. Check with your parent
8015 block 4 < scrub_point 5. Check with your parent
8016 block 5 < scrub_point 5. Check with your parent
8017 block 6 < scrub_point 5. Check with your parent
8018 block 0+4 <= scrub_point 6. No parent check
8019 block 1+4 <= scrub_point 6. No parent check
8020 block 2+4 <= scrub_point 6. No parent check
8021 block 3 < scrub_point 6. Check with your parent
8022 block 4 < scrub_point 6. Check with your parent
8023 block 5 < scrub_point 6. Check with your parent
8024 block 6 < scrub_point 6. Check with your parent
8025 block 0+4 <= scrub_point 7. No parent check
8026 block 1+4 <= scrub_point 7. No parent check
8027 block 2+4 <= scrub_point 7. No parent check
8028 block 3+4 <= scrub_point 7. No parent check
8029 block 4 < scrub_point 7. Check with your parent
8030 block 5 < scrub_point 7. Check with your parent
8031 block 6 < scrub_point 7. Check with your parent
8032 block 0+4 <= scrub_point 8. No parent check
8033 block 1+4 <= scrub_point 8. No parent check
8034 block 2+4 <= scrub_point 8. No parent check
8035 block 3+4 <= scrub_point 8. No parent check
8036 block 4+4 <= scrub_point 8. No parent check
8037 block 5 < scrub_point 8. Check with your parent
8038 block 6 < scrub_point 8. Check with your parent
8039 block 0 < scrub_point 0. Check with your parent
8040 block 1 < scrub_point 0. Check with your parent
8041 block 2 < scrub_point 0. Check with your parent
8042 block 3 < scrub_point 0. Check with your parent
8043 block 4 < scrub_point 0. Check with your parent
8044 block 5 < scrub_point 0. Check with your parent
8045 block 0 < scrub_point 1. Check with your parent
8046 block 1 < scrub_point 1. Check with your parent
8047 block 2 < scrub_point 1. Check with your parent
8048 block 3 < scrub_point 1. Check with your parent
8049 block 4 < scrub_point 1. Check with your parent
8050 block 5 < scrub_point 1. Check with your parent
8051 block 0 < scrub_point 2. Check with your parent
8052 block 1 < scrub_point 2. Check with your parent
8053 block 2 < scrub_point 2. Check with your parent
8054 block 3 < scrub_point 2. Check with your parent
8055 block 4 < scrub_point 2. Check with your parent
8056 block 5 < scrub_point 2. Check with your parent
8057 block 0 < scrub_point 3. Check with your parent
8058 block 1 < scrub_point 3. Check with your parent
8059 block 2 < scrub_point 3. Check with your parent
8060 block 3 < scrub_point 3. Check with your parent
8061 block 4 < scrub_point 3. Check with your parent
8062 block 5 < scrub_point 3. Check with your parent
8063 block 0 < scrub_point 4. Check with your parent
8064 block 1 < scrub_point 4. Check with your parent
8065 block 2 < scrub_point 4. Check with your parent
8066 block 3 < scrub_point 4. Check with your parent
8067 block 4 < scrub_point 4. Check with your parent
8068 block 5 < scrub_point 4. Check with your parent
8069 block 0+5 <= scrub_point 5. No parent check
8070 block 1 < scrub_point 5. Check with your parent
8071 block 2 < scrub_point 5. Check with your parent
8072 block 3 < scrub_point 5. Check with your parent
8073 block 4 < scrub_point 5. Check with your parent
8074 block 5 < scrub_point 5. Check with your parent
8075 block 0+5 <= scrub_point 6. No parent check
8076 block 1+5 <= scrub_point 6. No parent check
8077 block 2 < scrub_point 6. Check with your parent
8078 block 3 < scrub_point 6. Check with your parent
8079 block 4 < scrub_point 6. Check with your parent
8080 block 5 < scrub_point 6. Check with your parent
8081 block 0+5 <= scrub_point 7. No parent check
8082 block 1+5 <= scrub_point 7. No parent check
8083 block 2+5 <= scrub_point 7. No parent check
8084 block 3 < scrub_point 7. Check with your parent
8085 block 4 < scrub_point 7. Check with your parent
8086 block 5 < scrub_point 7. Check with your parent
8087 block 0+5 <= scrub_point 8. No parent check
8088 block 1+5 <= scrub_point 8. No parent check
8089 block 2+5 <= scrub_point 8. No parent check
8090 block 3+5 <= scrub_point 8. No parent check
8091 block 4 < scrub_point 8. Check with your parent
8092 block 5 < scrub_point 8. Check with your parent
8093 block 0 < scrub_point 0. Check with your parent
8094 block 1 < scrub_point 0. Check with your parent
8095 block 2 < scrub_point 0. Check with your parent
8096 block 3 < scrub_point 0. Check with your parent
8097 block 4 < scrub_point 0. Check with your parent
8098 block 0 < scrub_point 1. Check with your parent
8099 block 1 < scrub_point 1. Check with your parent
8100 block 2 < scrub_point 1. Check with your parent
8101 block 3 < scrub_point 1. Check with your parent
8102 block 4 < scrub_point 1. Check with your parent
8103 block 0 < scrub_point 2. Check with your parent
8104 block 1 < scrub_point 2. Check with your parent
8105 block 2 < scrub_point 2. Check with your parent
8106 block 3 < scrub_point 2. Check with your parent
8107 block 4 < scrub_point 2. Check with your parent
8108 block 0 < scrub_point 3. Check with your parent
8109 block 1 < scrub_point 3. Check with your parent
8110 block 2 < scrub_point 3. Check with your parent
8111 block 3 < scrub_point 3. Check with your parent
8112 block 4 < scrub_point 3. Check with your parent
8113 block 0 < scrub_point 4. Check with your parent
8114 block 1 < scrub_point 4. Check with your parent
8115 block 2 < scrub_point 4. Check with your parent
8116 block 3 < scrub_point 4. Check with your parent
8117 block 4 < scrub_point 4. Check with your parent
8118 block 0 < scrub_point 5. Check with your parent
8119 block 1 < scrub_point 5. Check with your parent
8120 block 2 < scrub_point 5. Check with your parent
8121 block 3 < scrub_point 5. Check with your parent
8122 block 4 < scrub_point 5. Check with your parent
8123 block 0+6 <= scrub_point 6. No parent check
8124 block 1 < scrub_point 6. Check with your parent
8125 block 2 < scrub_point 6. Check with your parent
8126 block 3 < scrub_point 6. Check with your parent
8127 block 4 < scrub_point 6. Check with your parent
8128 block 0+6 <= scrub_point 7. No parent check
8129 block 1+6 <= scrub_point 7. No parent check
8130 block 2 < scrub_point 7. Check with your parent
8131 block 3 < scrub_point 7. Check with your parent
8132 block 4 < scrub_point 7. Check with your parent
8133 block 0+6 <= scrub_point 8. No parent check
8134 block 1+6 <= scrub_point 8. No parent check
8135 block 2+6 <= scrub_point 8. No parent check
8136 block 3 < scrub_point 8. Check with your parent
8137 block 4 < scrub_point 8. Check with your parent
8138 block 0 < scrub_point 0. Check with your parent
8139 block 1 < scrub_point 0. Check with your parent
8140 block 2 < scrub_point 0. Check with your parent
8141 block 3 < scrub_point 0. Check with your parent
8142 block 0 < scrub_point 1. Check with your parent
8143 block 1 < scrub_point 1. Check with your parent
8144 block 2 < scrub_point 1. Check with your parent
8145 block 3 < scrub_point 1. Check with your parent
8146 block 0 < scrub_point 2. Check with your parent
8147 block 1 < scrub_point 2. Check with your parent
8148 block 2 < scrub_point 2. Check with your parent
8149 block 3 < scrub_point 2. Check with your parent
8150 block 0 < scrub_point 3. Check with your parent
8151 block 1 < scrub_point 3. Check with your parent
8152 block 2 < scrub_point 3. Check with your parent
8153 block 3 < scrub_point 3. Check with your parent
8154 block 0 < scrub_point 4. Check with your parent
8155 block 1 < scrub_point 4. Check with your parent
8156 block 2 < scrub_point 4. Check with your parent
8157 block 3 < scrub_point 4. Check with your parent
8158 block 0 < scrub_point 5. Check with your parent
8159 block 1 < scrub_point 5. Check with your parent
8160 block 2 < scrub_point 5. Check with your parent
8161 block 3 < scrub_point 5. Check with your parent
8162 block 0 < scrub_point 6. Check with your parent
8163 block 1 < scrub_point 6. Check with your parent
8164 block 2 < scrub_point 6. Check with your parent
8165 block 3 < scrub_point 6. Check with your parent
8166 block 0+7 <= scrub_point 7. No parent check
8167 block 1 < scrub_point 7. Check with your parent
8168 block 2 < scrub_point 7. Check with your parent
8169 block 3 < scrub_point 7. Check with your parent
8170 block 0+7 <= scrub_point 8. No parent check
8171 block 1+7 <= scrub_point 8. No parent check
8172 block 2 < scrub_point 8. Check with your parent
8173 block 3 < scrub_point 8. Check with your parent
8174 block 0 < scrub_point 0. Check with your parent
8175 block 1 < scrub_point 0. Check with your parent
8176 block 2 < scrub_point 0. Check with your parent
8177 block 0 < scrub_point 1. Check with your parent
8178 block 1 < scrub_point 1. Check with your parent
8179 block 2 < scrub_point 1. Check with your parent
8180 block 0 < scrub_point 2. Check with your parent
8181 block 1 < scrub_point 2. Check with your parent
8182 block 2 < scrub_point 2. Check with your parent
8183 block 0 < scrub_point 3. Check with your parent
8184 block 1 < scrub_point 3. Check with your parent
8185 block 2 < scrub_point 3. Check with your parent
8186 block 0 < scrub_point 4. Check with your parent
8187 block 1 < scrub_point 4. Check with your parent
8188 block 2 < scrub_point 4. Check with your parent
8189 block 0 < scrub_point 5. Check with your parent
8190 block 1 < scrub_point 5. Check with your parent
8191 block 2 < scrub_point 5. Check with your parent
8192 block 0 < scrub_point 6. Check with your parent
8193 block 1 < scrub_point 6. Check with your parent
8194 block 2 < scrub_point 6. Check with your parent
8195 block 0 < scrub_point 7. Check with your parent
8196 block 1 < scrub_point 7. Check with your parent
8197 block 2 < scrub_point 7. Check with your parent
8198 block 0+8 <= scrub_point 8. No parent check
8199 block 1 < scrub_point 8. Check with your parent
8200 block 2 < scrub_point 8. Check with your parent
8201 block 0 < scrub_point 0. Check with your parent
8202 block 1 < scrub_point 0. Check with your parent
8203 block 0 < scrub_point 1. Check with your parent
8204 block 1 < scrub_point 1. Check with your parent
8205 block 0 < scrub_point 2. Check with your parent
8206 block 1 < scrub_point 2. Check with your parent
8207 block 0 < scrub_point 3. Check with your parent
8208 block 1 < scrub_point 3. Check with your parent
8209 block 0 < scrub_point 4. Check with your parent
8210 block 1 < scrub_point 4. Check with your parent
8211 block 0 < scrub_point 5. Check with your parent
8212 block 1 < scrub_point 5. Check with your parent
8213 block 0 < scrub_point 6. Check with your parent
8214 block 1 < scrub_point 6. Check with your parent
8215 block 0 < scrub_point 7. Check with your parent
8216 block 1 < scrub_point 7. Check with your parent
8217 block 0 < scrub_point 8. Check with your parent
8218 block 1 < scrub_point 8. Check with your parent
8219 test volume::test::test_scrub_point_two_subvolume_equal ... ok
8220 block 0 < scrub_point 0. Check with your parent
8221 block 1 < scrub_point 0. Check with your parent
8222 block 2 < scrub_point 0. Check with your parent
8223 block 3 < scrub_point 0. Check with your parent
8224 block 4 < scrub_point 0. Check with your parent
8225 block 5 > parent 5. Go to SubVolume
8226 block 6 > parent 5. Go to SubVolume
8227 block 7 > parent 5. Go to SubVolume
8228 block 8 > parent 5. Go to SubVolume
8229 block 9 > parent 5. Go to SubVolume
8230 block 10 > parent 5. Go to SubVolume
8231 block 11 > parent 5. Go to SubVolume
8232 block 12 > parent 5. Go to SubVolume
8233 block 13 > parent 5. Go to SubVolume
8234 block 14 > parent 5. Go to SubVolume
8235 block 0+1 <= scrub_point 1. No parent check
8236 block 1 < scrub_point 1. Check with your parent
8237 block 2 < scrub_point 1. Check with your parent
8238 block 3 < scrub_point 1. Check with your parent
8239 block 4 < scrub_point 1. Check with your parent
8240 block 5 > parent 5. Go to SubVolume
8241 block 6 > parent 5. Go to SubVolume
8242 block 7 > parent 5. Go to SubVolume
8243 block 8 > parent 5. Go to SubVolume
8244 block 9 > parent 5. Go to SubVolume
8245 block 10 > parent 5. Go to SubVolume
8246 block 11 > parent 5. Go to SubVolume
8247 block 12 > parent 5. Go to SubVolume
8248 block 13 > parent 5. Go to SubVolume
8249 block 14 > parent 5. Go to SubVolume
8250 block 0+1 <= scrub_point 2. No parent check
8251 block 1+1 <= scrub_point 2. No parent check
8252 block 2 < scrub_point 2. Check with your parent
8253 block 3 < scrub_point 2. Check with your parent
8254 block 4 < scrub_point 2. Check with your parent
8255 block 5 > parent 5. Go to SubVolume
8256 block 6 > parent 5. Go to SubVolume
8257 block 7 > parent 5. Go to SubVolume
8258 block 8 > parent 5. Go to SubVolume
8259 block 9 > parent 5. Go to SubVolume
8260 block 10 > parent 5. Go to SubVolume
8261 block 11 > parent 5. Go to SubVolume
8262 block 12 > parent 5. Go to SubVolume
8263 block 13 > parent 5. Go to SubVolume
8264 block 14 > parent 5. Go to SubVolume
8265 block 0+1 <= scrub_point 3. No parent check
8266 block 1+1 <= scrub_point 3. No parent check
8267 block 2+1 <= scrub_point 3. No parent check
8268 block 3 < scrub_point 3. Check with your parent
8269 block 4 < scrub_point 3. Check with your parent
8270 block 5 > parent 5. Go to SubVolume
8271 block 6 > parent 5. Go to SubVolume
8272 block 7 > parent 5. Go to SubVolume
8273 block 8 > parent 5. Go to SubVolume
8274 block 9 > parent 5. Go to SubVolume
8275 block 10 > parent 5. Go to SubVolume
8276 block 11 > parent 5. Go to SubVolume
8277 block 12 > parent 5. Go to SubVolume
8278 block 13 > parent 5. Go to SubVolume
8279 block 14 > parent 5. Go to SubVolume
8280 block 0+1 <= scrub_point 4. No parent check
8281 block 1+1 <= scrub_point 4. No parent check
8282 block 2+1 <= scrub_point 4. No parent check
8283 block 3+1 <= scrub_point 4. No parent check
8284 block 4 < scrub_point 4. Check with your parent
8285 block 5 > parent 5. Go to SubVolume
8286 block 6 > parent 5. Go to SubVolume
8287 block 7 > parent 5. Go to SubVolume
8288 block 8 > parent 5. Go to SubVolume
8289 block 9 > parent 5. Go to SubVolume
8290 block 10 > parent 5. Go to SubVolume
8291 block 11 > parent 5. Go to SubVolume
8292 block 12 > parent 5. Go to SubVolume
8293 block 13 > parent 5. Go to SubVolume
8294 block 14 > parent 5. Go to SubVolume
8295 block 0+1 <= scrub_point 5. No parent check
8296 block 1+1 <= scrub_point 5. No parent check
8297 block 2+1 <= scrub_point 5. No parent check
8298 block 3+1 <= scrub_point 5. No parent check
8299 block 4+1 <= scrub_point 5. No parent check
8300 block 5 > parent 5. Go to SubVolume
8301 block 6 > parent 5. Go to SubVolume
8302 block 7 > parent 5. Go to SubVolume
8303 block 8 > parent 5. Go to SubVolume
8304 block 9 > parent 5. Go to SubVolume
8305 block 10 > parent 5. Go to SubVolume
8306 block 11 > parent 5. Go to SubVolume
8307 block 12 > parent 5. Go to SubVolume
8308 block 13 > parent 5. Go to SubVolume
8309 block 14 > parent 5. Go to SubVolume
8310 block 0 < scrub_point 0. Check with your parent
8311 block 1 < scrub_point 0. Check with your parent
8312 block 2 < scrub_point 0. Check with your parent
8313 block 3 < scrub_point 0. Check with your parent
8314 block 4 < scrub_point 0. Check with your parent
8315 block 5 > parent 5. Go to SubVolume
8316 block 6 > parent 5. Go to SubVolume
8317 block 7 > parent 5. Go to SubVolume
8318 block 8 > parent 5. Go to SubVolume
8319 block 9 > parent 5. Go to SubVolume
8320 block 10 > parent 5. Go to SubVolume
8321 block 11 > parent 5. Go to SubVolume
8322 block 12 > parent 5. Go to SubVolume
8323 block 13 > parent 5. Go to SubVolume
8324 block 0 < scrub_point 1. Check with your parent
8325 block 1 < scrub_point 1. Check with your parent
8326 block 2 < scrub_point 1. Check with your parent
8327 block 3 < scrub_point 1. Check with your parent
8328 block 4 < scrub_point 1. Check with your parent
8329 block 5 > parent 5. Go to SubVolume
8330 block 6 > parent 5. Go to SubVolume
8331 block 7 > parent 5. Go to SubVolume
8332 block 8 > parent 5. Go to SubVolume
8333 block 9 > parent 5. Go to SubVolume
8334 block 10 > parent 5. Go to SubVolume
8335 block 11 > parent 5. Go to SubVolume
8336 block 12 > parent 5. Go to SubVolume
8337 block 13 > parent 5. Go to SubVolume
8338 block 0+2 <= scrub_point 2. No parent check
8339 block 1 < scrub_point 2. Check with your parent
8340 block 2 < scrub_point 2. Check with your parent
8341 block 3 < scrub_point 2. Check with your parent
8342 block 4 < scrub_point 2. Check with your parent
8343 block 5 > parent 5. Go to SubVolume
8344 block 6 > parent 5. Go to SubVolume
8345 block 7 > parent 5. Go to SubVolume
8346 block 8 > parent 5. Go to SubVolume
8347 block 9 > parent 5. Go to SubVolume
8348 block 10 > parent 5. Go to SubVolume
8349 block 11 > parent 5. Go to SubVolume
8350 block 12 > parent 5. Go to SubVolume
8351 block 13 > parent 5. Go to SubVolume
8352 block 0+2 <= scrub_point 3. No parent check
8353 block 1+2 <= scrub_point 3. No parent check
8354 block 2 < scrub_point 3. Check with your parent
8355 block 3 < scrub_point 3. Check with your parent
8356 block 4 < scrub_point 3. Check with your parent
8357 block 5 > parent 5. Go to SubVolume
8358 block 6 > parent 5. Go to SubVolume
8359 block 7 > parent 5. Go to SubVolume
8360 block 8 > parent 5. Go to SubVolume
8361 block 9 > parent 5. Go to SubVolume
8362 block 10 > parent 5. Go to SubVolume
8363 block 11 > parent 5. Go to SubVolume
8364 block 12 > parent 5. Go to SubVolume
8365 block 13 > parent 5. Go to SubVolume
8366 block 0+2 <= scrub_point 4. No parent check
8367 block 1+2 <= scrub_point 4. No parent check
8368 block 2+2 <= scrub_point 4. No parent check
8369 block 3 < scrub_point 4. Check with your parent
8370 block 4 < scrub_point 4. Check with your parent
8371 block 5 > parent 5. Go to SubVolume
8372 block 6 > parent 5. Go to SubVolume
8373 block 7 > parent 5. Go to SubVolume
8374 block 8 > parent 5. Go to SubVolume
8375 block 9 > parent 5. Go to SubVolume
8376 block 10 > parent 5. Go to SubVolume
8377 block 11 > parent 5. Go to SubVolume
8378 block 12 > parent 5. Go to SubVolume
8379 block 13 > parent 5. Go to SubVolume
8380 block 0+2 <= scrub_point 5. No parent check
8381 block 1+2 <= scrub_point 5. No parent check
8382 block 2+2 <= scrub_point 5. No parent check
8383 block 3+2 <= scrub_point 5. No parent check
8384 block 4 < scrub_point 5. Check with your parent
8385 block 5 > parent 5. Go to SubVolume
8386 block 6 > parent 5. Go to SubVolume
8387 block 7 > parent 5. Go to SubVolume
8388 block 8 > parent 5. Go to SubVolume
8389 block 9 > parent 5. Go to SubVolume
8390 block 10 > parent 5. Go to SubVolume
8391 block 11 > parent 5. Go to SubVolume
8392 block 12 > parent 5. Go to SubVolume
8393 block 13 > parent 5. Go to SubVolume
8394 block 0 < scrub_point 0. Check with your parent
8395 block 1 < scrub_point 0. Check with your parent
8396 block 2 < scrub_point 0. Check with your parent
8397 block 3 < scrub_point 0. Check with your parent
8398 block 4 < scrub_point 0. Check with your parent
8399 block 5 > parent 5. Go to SubVolume
8400 block 6 > parent 5. Go to SubVolume
8401 block 7 > parent 5. Go to SubVolume
8402 block 8 > parent 5. Go to SubVolume
8403 block 9 > parent 5. Go to SubVolume
8404 block 10 > parent 5. Go to SubVolume
8405 block 11 > parent 5. Go to SubVolume
8406 block 12 > parent 5. Go to SubVolume
8407 block 0 < scrub_point 1. Check with your parent
8408 block 1 < scrub_point 1. Check with your parent
8409 block 2 < scrub_point 1. Check with your parent
8410 block 3 < scrub_point 1. Check with your parent
8411 block 4 < scrub_point 1. Check with your parent
8412 block 5 > parent 5. Go to SubVolume
8413 block 6 > parent 5. Go to SubVolume
8414 block 7 > parent 5. Go to SubVolume
8415 block 8 > parent 5. Go to SubVolume
8416 block 9 > parent 5. Go to SubVolume
8417 block 10 > parent 5. Go to SubVolume
8418 block 11 > parent 5. Go to SubVolume
8419 block 12 > parent 5. Go to SubVolume
8420 block 0 < scrub_point 2. Check with your parent
8421 block 1 < scrub_point 2. Check with your parent
8422 block 2 < scrub_point 2. Check with your parent
8423 block 3 < scrub_point 2. Check with your parent
8424 block 4 < scrub_point 2. Check with your parent
8425 block 5 > parent 5. Go to SubVolume
8426 block 6 > parent 5. Go to SubVolume
8427 block 7 > parent 5. Go to SubVolume
8428 block 8 > parent 5. Go to SubVolume
8429 block 9 > parent 5. Go to SubVolume
8430 block 10 > parent 5. Go to SubVolume
8431 block 11 > parent 5. Go to SubVolume
8432 block 12 > parent 5. Go to SubVolume
8433 block 0+3 <= scrub_point 3. No parent check
8434 block 1 < scrub_point 3. Check with your parent
8435 block 2 < scrub_point 3. Check with your parent
8436 block 3 < scrub_point 3. Check with your parent
8437 block 4 < scrub_point 3. Check with your parent
8438 block 5 > parent 5. Go to SubVolume
8439 block 6 > parent 5. Go to SubVolume
8440 block 7 > parent 5. Go to SubVolume
8441 block 8 > parent 5. Go to SubVolume
8442 block 9 > parent 5. Go to SubVolume
8443 block 10 > parent 5. Go to SubVolume
8444 block 11 > parent 5. Go to SubVolume
8445 block 12 > parent 5. Go to SubVolume
8446 block 0+3 <= scrub_point 4. No parent check
8447 block 1+3 <= scrub_point 4. No parent check
8448 block 2 < scrub_point 4. Check with your parent
8449 block 3 < scrub_point 4. Check with your parent
8450 block 4 < scrub_point 4. Check with your parent
8451 block 5 > parent 5. Go to SubVolume
8452 block 6 > parent 5. Go to SubVolume
8453 block 7 > parent 5. Go to SubVolume
8454 block 8 > parent 5. Go to SubVolume
8455 block 9 > parent 5. Go to SubVolume
8456 block 10 > parent 5. Go to SubVolume
8457 block 11 > parent 5. Go to SubVolume
8458 block 12 > parent 5. Go to SubVolume
8459 block 0+3 <= scrub_point 5. No parent check
8460 block 1+3 <= scrub_point 5. No parent check
8461 block 2+3 <= scrub_point 5. No parent check
8462 block 3 < scrub_point 5. Check with your parent
8463 block 4 < scrub_point 5. Check with your parent
8464 block 5 > parent 5. Go to SubVolume
8465 block 6 > parent 5. Go to SubVolume
8466 block 7 > parent 5. Go to SubVolume
8467 block 8 > parent 5. Go to SubVolume
8468 block 9 > parent 5. Go to SubVolume
8469 block 10 > parent 5. Go to SubVolume
8470 block 11 > parent 5. Go to SubVolume
8471 block 12 > parent 5. Go to SubVolume
8472 block 0 < scrub_point 0. Check with your parent
8473 block 1 < scrub_point 0. Check with your parent
8474 block 2 < scrub_point 0. Check with your parent
8475 block 3 < scrub_point 0. Check with your parent
8476 block 4 < scrub_point 0. Check with your parent
8477 block 5 > parent 5. Go to SubVolume
8478 block 6 > parent 5. Go to SubVolume
8479 block 7 > parent 5. Go to SubVolume
8480 block 8 > parent 5. Go to SubVolume
8481 block 9 > parent 5. Go to SubVolume
8482 block 10 > parent 5. Go to SubVolume
8483 block 11 > parent 5. Go to SubVolume
8484 block 0 < scrub_point 1. Check with your parent
8485 block 1 < scrub_point 1. Check with your parent
8486 block 2 < scrub_point 1. Check with your parent
8487 block 3 < scrub_point 1. Check with your parent
8488 block 4 < scrub_point 1. Check with your parent
8489 block 5 > parent 5. Go to SubVolume
8490 block 6 > parent 5. Go to SubVolume
8491 block 7 > parent 5. Go to SubVolume
8492 block 8 > parent 5. Go to SubVolume
8493 block 9 > parent 5. Go to SubVolume
8494 block 10 > parent 5. Go to SubVolume
8495 block 11 > parent 5. Go to SubVolume
8496 block 0 < scrub_point 2. Check with your parent
8497 block 1 < scrub_point 2. Check with your parent
8498 block 2 < scrub_point 2. Check with your parent
8499 block 3 < scrub_point 2. Check with your parent
8500 block 4 < scrub_point 2. Check with your parent
8501 block 5 > parent 5. Go to SubVolume
8502 block 6 > parent 5. Go to SubVolume
8503 block 7 > parent 5. Go to SubVolume
8504 block 8 > parent 5. Go to SubVolume
8505 block 9 > parent 5. Go to SubVolume
8506 block 10 > parent 5. Go to SubVolume
8507 block 11 > parent 5. Go to SubVolume
8508 block 0 < scrub_point 3. Check with your parent
8509 block 1 < scrub_point 3. Check with your parent
8510 block 2 < scrub_point 3. Check with your parent
8511 block 3 < scrub_point 3. Check with your parent
8512 block 4 < scrub_point 3. Check with your parent
8513 block 5 > parent 5. Go to SubVolume
8514 block 6 > parent 5. Go to SubVolume
8515 block 7 > parent 5. Go to SubVolume
8516 block 8 > parent 5. Go to SubVolume
8517 block 9 > parent 5. Go to SubVolume
8518 block 10 > parent 5. Go to SubVolume
8519 block 11 > parent 5. Go to SubVolume
8520 block 0+4 <= scrub_point 4. No parent check
8521 block 1 < scrub_point 4. Check with your parent
8522 block 2 < scrub_point 4. Check with your parent
8523 block 3 < scrub_point 4. Check with your parent
8524 block 4 < scrub_point 4. Check with your parent
8525 block 5 > parent 5. Go to SubVolume
8526 block 6 > parent 5. Go to SubVolume
8527 block 7 > parent 5. Go to SubVolume
8528 block 8 > parent 5. Go to SubVolume
8529 block 9 > parent 5. Go to SubVolume
8530 block 10 > parent 5. Go to SubVolume
8531 block 11 > parent 5. Go to SubVolume
8532 block 0+4 <= scrub_point 5. No parent check
8533 block 1+4 <= scrub_point 5. No parent check
8534 block 2 < scrub_point 5. Check with your parent
8535 block 3 < scrub_point 5. Check with your parent
8536 block 4 < scrub_point 5. Check with your parent
8537 block 5 > parent 5. Go to SubVolume
8538 block 6 > parent 5. Go to SubVolume
8539 block 7 > parent 5. Go to SubVolume
8540 block 8 > parent 5. Go to SubVolume
8541 block 9 > parent 5. Go to SubVolume
8542 block 10 > parent 5. Go to SubVolume
8543 block 11 > parent 5. Go to SubVolume
8544 block 0 < scrub_point 0. Check with your parent
8545 block 1 < scrub_point 0. Check with your parent
8546 block 2 < scrub_point 0. Check with your parent
8547 block 3 < scrub_point 0. Check with your parent
8548 block 4 < scrub_point 0. Check with your parent
8549 block 5 > parent 5. Go to SubVolume
8550 block 6 > parent 5. Go to SubVolume
8551 block 7 > parent 5. Go to SubVolume
8552 block 8 > parent 5. Go to SubVolume
8553 block 9 > parent 5. Go to SubVolume
8554 block 10 > parent 5. Go to SubVolume
8555 block 0 < scrub_point 1. Check with your parent
8556 block 1 < scrub_point 1. Check with your parent
8557 block 2 < scrub_point 1. Check with your parent
8558 block 3 < scrub_point 1. Check with your parent
8559 block 4 < scrub_point 1. Check with your parent
8560 block 5 > parent 5. Go to SubVolume
8561 block 6 > parent 5. Go to SubVolume
8562 block 7 > parent 5. Go to SubVolume
8563 block 8 > parent 5. Go to SubVolume
8564 block 9 > parent 5. Go to SubVolume
8565 block 10 > parent 5. Go to SubVolume
8566 block 0 < scrub_point 2. Check with your parent
8567 block 1 < scrub_point 2. Check with your parent
8568 block 2 < scrub_point 2. Check with your parent
8569 block 3 < scrub_point 2. Check with your parent
8570 block 4 < scrub_point 2. Check with your parent
8571 block 5 > parent 5. Go to SubVolume
8572 block 6 > parent 5. Go to SubVolume
8573 block 7 > parent 5. Go to SubVolume
8574 block 8 > parent 5. Go to SubVolume
8575 block 9 > parent 5. Go to SubVolume
8576 block 10 > parent 5. Go to SubVolume
8577 block 0 < scrub_point 3. Check with your parent
8578 block 1 < scrub_point 3. Check with your parent
8579 block 2 < scrub_point 3. Check with your parent
8580 block 3 < scrub_point 3. Check with your parent
8581 block 4 < scrub_point 3. Check with your parent
8582 block 5 > parent 5. Go to SubVolume
8583 block 6 > parent 5. Go to SubVolume
8584 block 7 > parent 5. Go to SubVolume
8585 block 8 > parent 5. Go to SubVolume
8586 block 9 > parent 5. Go to SubVolume
8587 block 10 > parent 5. Go to SubVolume
8588 block 0 < scrub_point 4. Check with your parent
8589 block 1 < scrub_point 4. Check with your parent
8590 block 2 < scrub_point 4. Check with your parent
8591 block 3 < scrub_point 4. Check with your parent
8592 block 4 < scrub_point 4. Check with your parent
8593 block 5 > parent 5. Go to SubVolume
8594 block 6 > parent 5. Go to SubVolume
8595 block 7 > parent 5. Go to SubVolume
8596 block 8 > parent 5. Go to SubVolume
8597 block 9 > parent 5. Go to SubVolume
8598 block 10 > parent 5. Go to SubVolume
8599 block 0+5 <= scrub_point 5. No parent check
8600 block 1 < scrub_point 5. Check with your parent
8601 block 2 < scrub_point 5. Check with your parent
8602 block 3 < scrub_point 5. Check with your parent
8603 block 4 < scrub_point 5. Check with your parent
8604 block 5 > parent 5. Go to SubVolume
8605 block 6 > parent 5. Go to SubVolume
8606 block 7 > parent 5. Go to SubVolume
8607 block 8 > parent 5. Go to SubVolume
8608 block 9 > parent 5. Go to SubVolume
8609 block 10 > parent 5. Go to SubVolume
8610 block 0 < scrub_point 0. Check with your parent
8611 block 1 < scrub_point 0. Check with your parent
8612 block 2 < scrub_point 0. Check with your parent
8613 block 3 < scrub_point 0. Check with your parent
8614 block 4 < scrub_point 0. Check with your parent
8615 block 5 > parent 5. Go to SubVolume
8616 block 6 > parent 5. Go to SubVolume
8617 block 7 > parent 5. Go to SubVolume
8618 block 8 > parent 5. Go to SubVolume
8619 block 9 > parent 5. Go to SubVolume
8620 block 0 < scrub_point 1. Check with your parent
8621 block 1 < scrub_point 1. Check with your parent
8622 block 2 < scrub_point 1. Check with your parent
8623 block 3 < scrub_point 1. Check with your parent
8624 block 4 < scrub_point 1. Check with your parent
8625 block 5 > parent 5. Go to SubVolume
8626 block 6 > parent 5. Go to SubVolume
8627 block 7 > parent 5. Go to SubVolume
8628 block 8 > parent 5. Go to SubVolume
8629 block 9 > parent 5. Go to SubVolume
8630 block 0 < scrub_point 2. Check with your parent
8631 block 1 < scrub_point 2. Check with your parent
8632 block 2 < scrub_point 2. Check with your parent
8633 block 3 < scrub_point 2. Check with your parent
8634 block 4 < scrub_point 2. Check with your parent
8635 block 5 > parent 5. Go to SubVolume
8636 block 6 > parent 5. Go to SubVolume
8637 block 7 > parent 5. Go to SubVolume
8638 block 8 > parent 5. Go to SubVolume
8639 block 9 > parent 5. Go to SubVolume
8640 block 0 < scrub_point 3. Check with your parent
8641 block 1 < scrub_point 3. Check with your parent
8642 block 2 < scrub_point 3. Check with your parent
8643 block 3 < scrub_point 3. Check with your parent
8644 block 4 < scrub_point 3. Check with your parent
8645 block 5 > parent 5. Go to SubVolume
8646 block 6 > parent 5. Go to SubVolume
8647 block 7 > parent 5. Go to SubVolume
8648 block 8 > parent 5. Go to SubVolume
8649 block 9 > parent 5. Go to SubVolume
8650 block 0 < scrub_point 4. Check with your parent
8651 block 1 < scrub_point 4. Check with your parent
8652 block 2 < scrub_point 4. Check with your parent
8653 block 3 < scrub_point 4. Check with your parent
8654 block 4 < scrub_point 4. Check with your parent
8655 block 5 > parent 5. Go to SubVolume
8656 block 6 > parent 5. Go to SubVolume
8657 block 7 > parent 5. Go to SubVolume
8658 block 8 > parent 5. Go to SubVolume
8659 block 9 > parent 5. Go to SubVolume
8660 block 0 < scrub_point 5. Check with your parent
8661 block 1 < scrub_point 5. Check with your parent
8662 block 2 < scrub_point 5. Check with your parent
8663 block 3 < scrub_point 5. Check with your parent
8664 block 4 < scrub_point 5. Check with your parent
8665 block 5 > parent 5. Go to SubVolume
8666 block 6 > parent 5. Go to SubVolume
8667 block 7 > parent 5. Go to SubVolume
8668 block 8 > parent 5. Go to SubVolume
8669 block 9 > parent 5. Go to SubVolume
8670 block 0 < scrub_point 0. Check with your parent
8671 block 1 < scrub_point 0. Check with your parent
8672 block 2 < scrub_point 0. Check with your parent
8673 block 3 < scrub_point 0. Check with your parent
8674 block 4 < scrub_point 0. Check with your parent
8675 block 5 > parent 5. Go to SubVolume
8676 block 6 > parent 5. Go to SubVolume
8677 block 7 > parent 5. Go to SubVolume
8678 block 8 > parent 5. Go to SubVolume
8679 block 0 < scrub_point 1. Check with your parent
8680 block 1 < scrub_point 1. Check with your parent
8681 block 2 < scrub_point 1. Check with your parent
8682 block 3 < scrub_point 1. Check with your parent
8683 block 4 < scrub_point 1. Check with your parent
8684 block 5 > parent 5. Go to SubVolume
8685 block 6 > parent 5. Go to SubVolume
8686 block 7 > parent 5. Go to SubVolume
8687 block 8 > parent 5. Go to SubVolume
8688 block 0 < scrub_point 2. Check with your parent
8689 block 1 < scrub_point 2. Check with your parent
8690 block 2 < scrub_point 2. Check with your parent
8691 block 3 < scrub_point 2. Check with your parent
8692 block 4 < scrub_point 2. Check with your parent
8693 block 5 > parent 5. Go to SubVolume
8694 block 6 > parent 5. Go to SubVolume
8695 block 7 > parent 5. Go to SubVolume
8696 block 8 > parent 5. Go to SubVolume
8697 block 0 < scrub_point 3. Check with your parent
8698 block 1 < scrub_point 3. Check with your parent
8699 block 2 < scrub_point 3. Check with your parent
8700 block 3 < scrub_point 3. Check with your parent
8701 block 4 < scrub_point 3. Check with your parent
8702 block 5 > parent 5. Go to SubVolume
8703 block 6 > parent 5. Go to SubVolume
8704 block 7 > parent 5. Go to SubVolume
8705 block 8 > parent 5. Go to SubVolume
8706 block 0 < scrub_point 4. Check with your parent
8707 block 1 < scrub_point 4. Check with your parent
8708 block 2 < scrub_point 4. Check with your parent
8709 block 3 < scrub_point 4. Check with your parent
8710 block 4 < scrub_point 4. Check with your parent
8711 block 5 > parent 5. Go to SubVolume
8712 block 6 > parent 5. Go to SubVolume
8713 block 7 > parent 5. Go to SubVolume
8714 block 8 > parent 5. Go to SubVolume
8715 block 0 < scrub_point 5. Check with your parent
8716 block 1 < scrub_point 5. Check with your parent
8717 block 2 < scrub_point 5. Check with your parent
8718 block 3 < scrub_point 5. Check with your parent
8719 block 4 < scrub_point 5. Check with your parent
8720 block 5 > parent 5. Go to SubVolume
8721 block 6 > parent 5. Go to SubVolume
8722 block 7 > parent 5. Go to SubVolume
8723 block 8 > parent 5. Go to SubVolume
8724 block 0 < scrub_point 0. Check with your parent
8725 block 1 < scrub_point 0. Check with your parent
8726 block 2 < scrub_point 0. Check with your parent
8727 block 3 < scrub_point 0. Check with your parent
8728 block 4 < scrub_point 0. Check with your parent
8729 block 5 > parent 5. Go to SubVolume
8730 block 6 > parent 5. Go to SubVolume
8731 block 7 > parent 5. Go to SubVolume
8732 block 0 < scrub_point 1. Check with your parent
8733 block 1 < scrub_point 1. Check with your parent
8734 block 2 < scrub_point 1. Check with your parent
8735 block 3 < scrub_point 1. Check with your parent
8736 block 4 < scrub_point 1. Check with your parent
8737 block 5 > parent 5. Go to SubVolume
8738 block 6 > parent 5. Go to SubVolume
8739 block 7 > parent 5. Go to SubVolume
8740 block 0 < scrub_point 2. Check with your parent
8741 block 1 < scrub_point 2. Check with your parent
8742 block 2 < scrub_point 2. Check with your parent
8743 block 3 < scrub_point 2. Check with your parent
8744 block 4 < scrub_point 2. Check with your parent
8745 block 5 > parent 5. Go to SubVolume
8746 block 6 > parent 5. Go to SubVolume
8747 block 7 > parent 5. Go to SubVolume
8748 block 0 < scrub_point 3. Check with your parent
8749 block 1 < scrub_point 3. Check with your parent
8750 block 2 < scrub_point 3. Check with your parent
8751 block 3 < scrub_point 3. Check with your parent
8752 block 4 < scrub_point 3. Check with your parent
8753 block 5 > parent 5. Go to SubVolume
8754 block 6 > parent 5. Go to SubVolume
8755 block 7 > parent 5. Go to SubVolume
8756 block 0 < scrub_point 4. Check with your parent
8757 block 1 < scrub_point 4. Check with your parent
8758 block 2 < scrub_point 4. Check with your parent
8759 block 3 < scrub_point 4. Check with your parent
8760 block 4 < scrub_point 4. Check with your parent
8761 block 5 > parent 5. Go to SubVolume
8762 block 6 > parent 5. Go to SubVolume
8763 block 7 > parent 5. Go to SubVolume
8764 block 0 < scrub_point 5. Check with your parent
8765 block 1 < scrub_point 5. Check with your parent
8766 block 2 < scrub_point 5. Check with your parent
8767 block 3 < scrub_point 5. Check with your parent
8768 block 4 < scrub_point 5. Check with your parent
8769 block 5 > parent 5. Go to SubVolume
8770 block 6 > parent 5. Go to SubVolume
8771 block 7 > parent 5. Go to SubVolume
8772 block 0 < scrub_point 0. Check with your parent
8773 block 1 < scrub_point 0. Check with your parent
8774 block 2 < scrub_point 0. Check with your parent
8775 block 3 < scrub_point 0. Check with your parent
8776 block 4 < scrub_point 0. Check with your parent
8777 block 5 > parent 5. Go to SubVolume
8778 block 6 > parent 5. Go to SubVolume
8779 block 0 < scrub_point 1. Check with your parent
8780 block 1 < scrub_point 1. Check with your parent
8781 block 2 < scrub_point 1. Check with your parent
8782 block 3 < scrub_point 1. Check with your parent
8783 block 4 < scrub_point 1. Check with your parent
8784 block 5 > parent 5. Go to SubVolume
8785 block 6 > parent 5. Go to SubVolume
8786 block 0 < scrub_point 2. Check with your parent
8787 block 1 < scrub_point 2. Check with your parent
8788 block 2 < scrub_point 2. Check with your parent
8789 block 3 < scrub_point 2. Check with your parent
8790 block 4 < scrub_point 2. Check with your parent
8791 block 5 > parent 5. Go to SubVolume
8792 block 6 > parent 5. Go to SubVolume
8793 block 0 < scrub_point 3. Check with your parent
8794 block 1 < scrub_point 3. Check with your parent
8795 block 2 < scrub_point 3. Check with your parent
8796 block 3 < scrub_point 3. Check with your parent
8797 block 4 < scrub_point 3. Check with your parent
8798 block 5 > parent 5. Go to SubVolume
8799 block 6 > parent 5. Go to SubVolume
8800 block 0 < scrub_point 4. Check with your parent
8801 block 1 < scrub_point 4. Check with your parent
8802 block 2 < scrub_point 4. Check with your parent
8803 block 3 < scrub_point 4. Check with your parent
8804 block 4 < scrub_point 4. Check with your parent
8805 block 5 > parent 5. Go to SubVolume
8806 block 6 > parent 5. Go to SubVolume
8807 block 0 < scrub_point 5. Check with your parent
8808 block 1 < scrub_point 5. Check with your parent
8809 block 2 < scrub_point 5. Check with your parent
8810 block 3 < scrub_point 5. Check with your parent
8811 block 4 < scrub_point 5. Check with your parent
8812 block 5 > parent 5. Go to SubVolume
8813 block 6 > parent 5. Go to SubVolume
8814 block 0 < scrub_point 0. Check with your parent
8815 block 1 < scrub_point 0. Check with your parent
8816 block 2 < scrub_point 0. Check with your parent
8817 block 3 < scrub_point 0. Check with your parent
8818 block 4 < scrub_point 0. Check with your parent
8819 block 5 > parent 5. Go to SubVolume
8820 block 0 < scrub_point 1. Check with your parent
8821 block 1 < scrub_point 1. Check with your parent
8822 block 2 < scrub_point 1. Check with your parent
8823 block 3 < scrub_point 1. Check with your parent
8824 block 4 < scrub_point 1. Check with your parent
8825 block 5 > parent 5. Go to SubVolume
8826 block 0 < scrub_point 2. Check with your parent
8827 block 1 < scrub_point 2. Check with your parent
8828 block 2 < scrub_point 2. Check with your parent
8829 block 3 < scrub_point 2. Check with your parent
8830 block 4 < scrub_point 2. Check with your parent
8831 block 5 > parent 5. Go to SubVolume
8832 block 0 < scrub_point 3. Check with your parent
8833 block 1 < scrub_point 3. Check with your parent
8834 block 2 < scrub_point 3. Check with your parent
8835 block 3 < scrub_point 3. Check with your parent
8836 block 4 < scrub_point 3. Check with your parent
8837 block 5 > parent 5. Go to SubVolume
8838 block 0 < scrub_point 4. Check with your parent
8839 block 1 < scrub_point 4. Check with your parent
8840 block 2 < scrub_point 4. Check with your parent
8841 block 3 < scrub_point 4. Check with your parent
8842 block 4 < scrub_point 4. Check with your parent
8843 block 5 > parent 5. Go to SubVolume
8844 block 0 < scrub_point 5. Check with your parent
8845 block 1 < scrub_point 5. Check with your parent
8846 block 2 < scrub_point 5. Check with your parent
8847 block 3 < scrub_point 5. Check with your parent
8848 block 4 < scrub_point 5. Check with your parent
8849 block 5 > parent 5. Go to SubVolume
8850 block 0 < scrub_point 0. Check with your parent
8851 block 1 < scrub_point 0. Check with your parent
8852 block 2 < scrub_point 0. Check with your parent
8853 block 3 < scrub_point 0. Check with your parent
8854 block 4 < scrub_point 0. Check with your parent
8855 block 0 < scrub_point 1. Check with your parent
8856 block 1 < scrub_point 1. Check with your parent
8857 block 2 < scrub_point 1. Check with your parent
8858 block 3 < scrub_point 1. Check with your parent
8859 block 4 < scrub_point 1. Check with your parent
8860 block 0 < scrub_point 2. Check with your parent
8861 block 1 < scrub_point 2. Check with your parent
8862 block 2 < scrub_point 2. Check with your parent
8863 block 3 < scrub_point 2. Check with your parent
8864 block 4 < scrub_point 2. Check with your parent
8865 block 0 < scrub_point 3. Check with your parent
8866 block 1 < scrub_point 3. Check with your parent
8867 block 2 < scrub_point 3. Check with your parent
8868 block 3 < scrub_point 3. Check with your parent
8869 block 4 < scrub_point 3. Check with your parent
8870 block 0 < scrub_point 4. Check with your parent
8871 block 1 < scrub_point 4. Check with your parent
8872 block 2 < scrub_point 4. Check with your parent
8873 block 3 < scrub_point 4. Check with your parent
8874 block 4 < scrub_point 4. Check with your parent
8875 block 0 < scrub_point 5. Check with your parent
8876 block 1 < scrub_point 5. Check with your parent
8877 block 2 < scrub_point 5. Check with your parent
8878 block 3 < scrub_point 5. Check with your parent
8879 block 4 < scrub_point 5. Check with your parent
8880 block 0 < scrub_point 0. Check with your parent
8881 block 1 < scrub_point 0. Check with your parent
8882 block 2 < scrub_point 0. Check with your parent
8883 block 3 < scrub_point 0. Check with your parent
8884 block 0 < scrub_point 1. Check with your parent
8885 block 1 < scrub_point 1. Check with your parent
8886 block 2 < scrub_point 1. Check with your parent
8887 block 3 < scrub_point 1. Check with your parent
8888 block 0 < scrub_point 2. Check with your parent
8889 block 1 < scrub_point 2. Check with your parent
8890 block 2 < scrub_point 2. Check with your parent
8891 block 3 < scrub_point 2. Check with your parent
8892 block 0 < scrub_point 3. Check with your parent
8893 block 1 < scrub_point 3. Check with your parent
8894 block 2 < scrub_point 3. Check with your parent
8895 block 3 < scrub_point 3. Check with your parent
8896 block 0 < scrub_point 4. Check with your parent
8897 block 1 < scrub_point 4. Check with your parent
8898 block 2 < scrub_point 4. Check with your parent
8899 block 3 < scrub_point 4. Check with your parent
8900 block 0 < scrub_point 5. Check with your parent
8901 block 1 < scrub_point 5. Check with your parent
8902 block 2 < scrub_point 5. Check with your parent
8903 block 3 < scrub_point 5. Check with your parent
8904 block 0 < scrub_point 0. Check with your parent
8905 block 1 < scrub_point 0. Check with your parent
8906 block 2 < scrub_point 0. Check with your parent
8907 block 0 < scrub_point 1. Check with your parent
8908 block 1 < scrub_point 1. Check with your parent
8909 block 2 < scrub_point 1. Check with your parent
8910 block 0 < scrub_point 2. Check with your parent
8911 block 1 < scrub_point 2. Check with your parent
8912 block 2 < scrub_point 2. Check with your parent
8913 block 0 < scrub_point 3. Check with your parent
8914 block 1 < scrub_point 3. Check with your parent
8915 block 2 < scrub_point 3. Check with your parent
8916 block 0 < scrub_point 4. Check with your parent
8917 block 1 < scrub_point 4. Check with your parent
8918 block 2 < scrub_point 4. Check with your parent
8919 block 0 < scrub_point 5. Check with your parent
8920 block 1 < scrub_point 5. Check with your parent
8921 block 2 < scrub_point 5. Check with your parent
8922 block 0 < scrub_point 0. Check with your parent
8923 block 1 < scrub_point 0. Check with your parent
8924 block 0 < scrub_point 1. Check with your parent
8925 block 1 < scrub_point 1. Check with your parent
8926 block 0 < scrub_point 2. Check with your parent
8927 block 1 < scrub_point 2. Check with your parent
8928 block 0 < scrub_point 3. Check with your parent
8929 block 1 < scrub_point 3. Check with your parent
8930 block 0 < scrub_point 4. Check with your parent
8931 block 1 < scrub_point 4. Check with your parent
8932 block 0 < scrub_point 5. Check with your parent
8933 block 1 < scrub_point 5. Check with your parent
8934 test volume::test::test_scrub_point_two_subvolume_smaller_1 ... ok
8935 block 0 < scrub_point 0. Check with your parent
8936 block 1 < scrub_point 0. Check with your parent
8937 block 2 < scrub_point 0. Check with your parent
8938 block 3 < scrub_point 0. Check with your parent
8939 block 4 < scrub_point 0. Check with your parent
8940 block 5 > parent 5. Go to SubVolume
8941 block 6 > parent 5. Go to SubVolume
8942 block 7 > parent 5. Go to SubVolume
8943 block 8 > parent 5. Go to SubVolume
8944 block 9 > parent 5. Go to SubVolume
8945 block 0+1 <= scrub_point 1. No parent check
8946 block 1 < scrub_point 1. Check with your parent
8947 block 2 < scrub_point 1. Check with your parent
8948 block 3 < scrub_point 1. Check with your parent
8949 block 4 < scrub_point 1. Check with your parent
8950 block 5 > parent 5. Go to SubVolume
8951 block 6 > parent 5. Go to SubVolume
8952 block 7 > parent 5. Go to SubVolume
8953 block 8 > parent 5. Go to SubVolume
8954 block 9 > parent 5. Go to SubVolume
8955 block 0+1 <= scrub_point 2. No parent check
8956 block 1+1 <= scrub_point 2. No parent check
8957 block 2 < scrub_point 2. Check with your parent
8958 block 3 < scrub_point 2. Check with your parent
8959 block 4 < scrub_point 2. Check with your parent
8960 block 5 > parent 5. Go to SubVolume
8961 block 6 > parent 5. Go to SubVolume
8962 block 7 > parent 5. Go to SubVolume
8963 block 8 > parent 5. Go to SubVolume
8964 block 9 > parent 5. Go to SubVolume
8965 block 0+1 <= scrub_point 3. No parent check
8966 block 1+1 <= scrub_point 3. No parent check
8967 block 2+1 <= scrub_point 3. No parent check
8968 block 3 < scrub_point 3. Check with your parent
8969 block 4 < scrub_point 3. Check with your parent
8970 block 5 > parent 5. Go to SubVolume
8971 block 6 > parent 5. Go to SubVolume
8972 block 7 > parent 5. Go to SubVolume
8973 block 8 > parent 5. Go to SubVolume
8974 block 9 > parent 5. Go to SubVolume
8975 block 0+1 <= scrub_point 4. No parent check
8976 block 1+1 <= scrub_point 4. No parent check
8977 block 2+1 <= scrub_point 4. No parent check
8978 block 3+1 <= scrub_point 4. No parent check
8979 block 4 < scrub_point 4. Check with your parent
8980 block 5 > parent 5. Go to SubVolume
8981 block 6 > parent 5. Go to SubVolume
8982 block 7 > parent 5. Go to SubVolume
8983 block 8 > parent 5. Go to SubVolume
8984 block 9 > parent 5. Go to SubVolume
8985 block 0+1 <= scrub_point 5. No parent check
8986 block 1+1 <= scrub_point 5. No parent check
8987 block 2+1 <= scrub_point 5. No parent check
8988 block 3+1 <= scrub_point 5. No parent check
8989 block 4+1 <= scrub_point 5. No parent check
8990 block 5 > parent 5. Go to SubVolume
8991 block 6 > parent 5. Go to SubVolume
8992 block 7 > parent 5. Go to SubVolume
8993 block 8 > parent 5. Go to SubVolume
8994 block 9 > parent 5. Go to SubVolume
8995 block 0 < scrub_point 0. Check with your parent
8996 block 1 < scrub_point 0. Check with your parent
8997 block 2 < scrub_point 0. Check with your parent
8998 block 3 < scrub_point 0. Check with your parent
8999 block 4 < scrub_point 0. Check with your parent
9000 block 5 > parent 5. Go to SubVolume
9001 block 6 > parent 5. Go to SubVolume
9002 block 7 > parent 5. Go to SubVolume
9003 block 8 > parent 5. Go to SubVolume
9004 block 0 < scrub_point 1. Check with your parent
9005 block 1 < scrub_point 1. Check with your parent
9006 block 2 < scrub_point 1. Check with your parent
9007 block 3 < scrub_point 1. Check with your parent
9008 block 4 < scrub_point 1. Check with your parent
9009 block 5 > parent 5. Go to SubVolume
9010 block 6 > parent 5. Go to SubVolume
9011 block 7 > parent 5. Go to SubVolume
9012 block 8 > parent 5. Go to SubVolume
9013 block 0+2 <= scrub_point 2. No parent check
9014 block 1 < scrub_point 2. Check with your parent
9015 block 2 < scrub_point 2. Check with your parent
9016 block 3 < scrub_point 2. Check with your parent
9017 block 4 < scrub_point 2. Check with your parent
9018 block 5 > parent 5. Go to SubVolume
9019 block 6 > parent 5. Go to SubVolume
9020 block 7 > parent 5. Go to SubVolume
9021 block 8 > parent 5. Go to SubVolume
9022 block 0+2 <= scrub_point 3. No parent check
9023 block 1+2 <= scrub_point 3. No parent check
9024 block 2 < scrub_point 3. Check with your parent
9025 block 3 < scrub_point 3. Check with your parent
9026 block 4 < scrub_point 3. Check with your parent
9027 block 5 > parent 5. Go to SubVolume
9028 block 6 > parent 5. Go to SubVolume
9029 block 7 > parent 5. Go to SubVolume
9030 block 8 > parent 5. Go to SubVolume
9031 block 0+2 <= scrub_point 4. No parent check
9032 block 1+2 <= scrub_point 4. No parent check
9033 block 2+2 <= scrub_point 4. No parent check
9034 block 3 < scrub_point 4. Check with your parent
9035 block 4 < scrub_point 4. Check with your parent
9036 block 5 > parent 5. Go to SubVolume
9037 block 6 > parent 5. Go to SubVolume
9038 block 7 > parent 5. Go to SubVolume
9039 block 8 > parent 5. Go to SubVolume
9040 block 0+2 <= scrub_point 5. No parent check
9041 block 1+2 <= scrub_point 5. No parent check
9042 block 2+2 <= scrub_point 5. No parent check
9043 block 3+2 <= scrub_point 5. No parent check
9044 block 4 < scrub_point 5. Check with your parent
9045 block 5 > parent 5. Go to SubVolume
9046 block 6 > parent 5. Go to SubVolume
9047 block 7 > parent 5. Go to SubVolume
9048 block 8 > parent 5. Go to SubVolume
9049 block 0 < scrub_point 0. Check with your parent
9050 block 1 < scrub_point 0. Check with your parent
9051 block 2 < scrub_point 0. Check with your parent
9052 block 3 < scrub_point 0. Check with your parent
9053 block 4 < scrub_point 0. Check with your parent
9054 block 5 > parent 5. Go to SubVolume
9055 block 6 > parent 5. Go to SubVolume
9056 block 7 > parent 5. Go to SubVolume
9057 block 0 < scrub_point 1. Check with your parent
9058 block 1 < scrub_point 1. Check with your parent
9059 block 2 < scrub_point 1. Check with your parent
9060 block 3 < scrub_point 1. Check with your parent
9061 block 4 < scrub_point 1. Check with your parent
9062 block 5 > parent 5. Go to SubVolume
9063 block 6 > parent 5. Go to SubVolume
9064 block 7 > parent 5. Go to SubVolume
9065 block 0 < scrub_point 2. Check with your parent
9066 block 1 < scrub_point 2. Check with your parent
9067 block 2 < scrub_point 2. Check with your parent
9068 block 3 < scrub_point 2. Check with your parent
9069 block 4 < scrub_point 2. Check with your parent
9070 block 5 > parent 5. Go to SubVolume
9071 block 6 > parent 5. Go to SubVolume
9072 block 7 > parent 5. Go to SubVolume
9073 block 0+3 <= scrub_point 3. No parent check
9074 block 1 < scrub_point 3. Check with your parent
9075 block 2 < scrub_point 3. Check with your parent
9076 block 3 < scrub_point 3. Check with your parent
9077 block 4 < scrub_point 3. Check with your parent
9078 block 5 > parent 5. Go to SubVolume
9079 block 6 > parent 5. Go to SubVolume
9080 block 7 > parent 5. Go to SubVolume
9081 block 0+3 <= scrub_point 4. No parent check
9082 block 1+3 <= scrub_point 4. No parent check
9083 block 2 < scrub_point 4. Check with your parent
9084 block 3 < scrub_point 4. Check with your parent
9085 block 4 < scrub_point 4. Check with your parent
9086 block 5 > parent 5. Go to SubVolume
9087 block 6 > parent 5. Go to SubVolume
9088 block 7 > parent 5. Go to SubVolume
9089 block 0+3 <= scrub_point 5. No parent check
9090 block 1+3 <= scrub_point 5. No parent check
9091 block 2+3 <= scrub_point 5. No parent check
9092 block 3 < scrub_point 5. Check with your parent
9093 block 4 < scrub_point 5. Check with your parent
9094 block 5 > parent 5. Go to SubVolume
9095 block 6 > parent 5. Go to SubVolume
9096 block 7 > parent 5. Go to SubVolume
9097 block 0 < scrub_point 0. Check with your parent
9098 block 1 < scrub_point 0. Check with your parent
9099 block 2 < scrub_point 0. Check with your parent
9100 block 3 < scrub_point 0. Check with your parent
9101 block 4 < scrub_point 0. Check with your parent
9102 block 5 > parent 5. Go to SubVolume
9103 block 6 > parent 5. Go to SubVolume
9104 block 0 < scrub_point 1. Check with your parent
9105 block 1 < scrub_point 1. Check with your parent
9106 block 2 < scrub_point 1. Check with your parent
9107 block 3 < scrub_point 1. Check with your parent
9108 block 4 < scrub_point 1. Check with your parent
9109 block 5 > parent 5. Go to SubVolume
9110 block 6 > parent 5. Go to SubVolume
9111 block 0 < scrub_point 2. Check with your parent
9112 block 1 < scrub_point 2. Check with your parent
9113 block 2 < scrub_point 2. Check with your parent
9114 block 3 < scrub_point 2. Check with your parent
9115 block 4 < scrub_point 2. Check with your parent
9116 block 5 > parent 5. Go to SubVolume
9117 block 6 > parent 5. Go to SubVolume
9118 block 0 < scrub_point 3. Check with your parent
9119 block 1 < scrub_point 3. Check with your parent
9120 block 2 < scrub_point 3. Check with your parent
9121 block 3 < scrub_point 3. Check with your parent
9122 block 4 < scrub_point 3. Check with your parent
9123 block 5 > parent 5. Go to SubVolume
9124 block 6 > parent 5. Go to SubVolume
9125 block 0+4 <= scrub_point 4. No parent check
9126 block 1 < scrub_point 4. Check with your parent
9127 block 2 < scrub_point 4. Check with your parent
9128 block 3 < scrub_point 4. Check with your parent
9129 block 4 < scrub_point 4. Check with your parent
9130 block 5 > parent 5. Go to SubVolume
9131 block 6 > parent 5. Go to SubVolume
9132 block 0+4 <= scrub_point 5. No parent check
9133 block 1+4 <= scrub_point 5. No parent check
9134 block 2 < scrub_point 5. Check with your parent
9135 block 3 < scrub_point 5. Check with your parent
9136 block 4 < scrub_point 5. Check with your parent
9137 block 5 > parent 5. Go to SubVolume
9138 block 6 > parent 5. Go to SubVolume
9139 block 0 < scrub_point 0. Check with your parent
9140 block 1 < scrub_point 0. Check with your parent
9141 block 2 < scrub_point 0. Check with your parent
9142 block 3 < scrub_point 0. Check with your parent
9143 block 4 < scrub_point 0. Check with your parent
9144 block 5 > parent 5. Go to SubVolume
9145 block 0 < scrub_point 1. Check with your parent
9146 block 1 < scrub_point 1. Check with your parent
9147 block 2 < scrub_point 1. Check with your parent
9148 block 3 < scrub_point 1. Check with your parent
9149 block 4 < scrub_point 1. Check with your parent
9150 block 5 > parent 5. Go to SubVolume
9151 block 0 < scrub_point 2. Check with your parent
9152 block 1 < scrub_point 2. Check with your parent
9153 block 2 < scrub_point 2. Check with your parent
9154 block 3 < scrub_point 2. Check with your parent
9155 block 4 < scrub_point 2. Check with your parent
9156 block 5 > parent 5. Go to SubVolume
9157 block 0 < scrub_point 3. Check with your parent
9158 block 1 < scrub_point 3. Check with your parent
9159 block 2 < scrub_point 3. Check with your parent
9160 block 3 < scrub_point 3. Check with your parent
9161 block 4 < scrub_point 3. Check with your parent
9162 block 5 > parent 5. Go to SubVolume
9163 block 0 < scrub_point 4. Check with your parent
9164 block 1 < scrub_point 4. Check with your parent
9165 block 2 < scrub_point 4. Check with your parent
9166 block 3 < scrub_point 4. Check with your parent
9167 block 4 < scrub_point 4. Check with your parent
9168 block 5 > parent 5. Go to SubVolume
9169 block 0+5 <= scrub_point 5. No parent check
9170 block 1 < scrub_point 5. Check with your parent
9171 block 2 < scrub_point 5. Check with your parent
9172 block 3 < scrub_point 5. Check with your parent
9173 block 4 < scrub_point 5. Check with your parent
9174 block 5 > parent 5. Go to SubVolume
9175 block 0 < scrub_point 0. Check with your parent
9176 block 1 < scrub_point 0. Check with your parent
9177 block 2 < scrub_point 0. Check with your parent
9178 block 3 < scrub_point 0. Check with your parent
9179 block 4 < scrub_point 0. Check with your parent
9180 block 0 < scrub_point 1. Check with your parent
9181 block 1 < scrub_point 1. Check with your parent
9182 block 2 < scrub_point 1. Check with your parent
9183 block 3 < scrub_point 1. Check with your parent
9184 block 4 < scrub_point 1. Check with your parent
9185 block 0 < scrub_point 2. Check with your parent
9186 block 1 < scrub_point 2. Check with your parent
9187 block 2 < scrub_point 2. Check with your parent
9188 block 3 < scrub_point 2. Check with your parent
9189 block 4 < scrub_point 2. Check with your parent
9190 block 0 < scrub_point 3. Check with your parent
9191 block 1 < scrub_point 3. Check with your parent
9192 block 2 < scrub_point 3. Check with your parent
9193 block 3 < scrub_point 3. Check with your parent
9194 block 4 < scrub_point 3. Check with your parent
9195 block 0 < scrub_point 4. Check with your parent
9196 block 1 < scrub_point 4. Check with your parent
9197 block 2 < scrub_point 4. Check with your parent
9198 block 3 < scrub_point 4. Check with your parent
9199 block 4 < scrub_point 4. Check with your parent
9200 block 0 < scrub_point 5. Check with your parent
9201 block 1 < scrub_point 5. Check with your parent
9202 block 2 < scrub_point 5. Check with your parent
9203 block 3 < scrub_point 5. Check with your parent
9204 block 4 < scrub_point 5. Check with your parent
9205 block 0 < scrub_point 0. Check with your parent
9206 block 1 < scrub_point 0. Check with your parent
9207 block 2 < scrub_point 0. Check with your parent
9208 block 3 < scrub_point 0. Check with your parent
9209 block 0 < scrub_point 1. Check with your parent
9210 block 1 < scrub_point 1. Check with your parent
9211 block 2 < scrub_point 1. Check with your parent
9212 block 3 < scrub_point 1. Check with your parent
9213 block 0 < scrub_point 2. Check with your parent
9214 block 1 < scrub_point 2. Check with your parent
9215 block 2 < scrub_point 2. Check with your parent
9216 block 3 < scrub_point 2. Check with your parent
9217 block 0 < scrub_point 3. Check with your parent
9218 block 1 < scrub_point 3. Check with your parent
9219 block 2 < scrub_point 3. Check with your parent
9220 block 3 < scrub_point 3. Check with your parent
9221 block 0 < scrub_point 4. Check with your parent
9222 block 1 < scrub_point 4. Check with your parent
9223 block 2 < scrub_point 4. Check with your parent
9224 block 3 < scrub_point 4. Check with your parent
9225 block 0 < scrub_point 5. Check with your parent
9226 block 1 < scrub_point 5. Check with your parent
9227 block 2 < scrub_point 5. Check with your parent
9228 block 3 < scrub_point 5. Check with your parent
9229 block 0 < scrub_point 0. Check with your parent
9230 block 1 < scrub_point 0. Check with your parent
9231 block 2 < scrub_point 0. Check with your parent
9232 block 0 < scrub_point 1. Check with your parent
9233 block 1 < scrub_point 1. Check with your parent
9234 block 2 < scrub_point 1. Check with your parent
9235 block 0 < scrub_point 2. Check with your parent
9236 block 1 < scrub_point 2. Check with your parent
9237 block 2 < scrub_point 2. Check with your parent
9238 block 0 < scrub_point 3. Check with your parent
9239 block 1 < scrub_point 3. Check with your parent
9240 block 2 < scrub_point 3. Check with your parent
9241 block 0 < scrub_point 4. Check with your parent
9242 block 1 < scrub_point 4. Check with your parent
9243 block 2 < scrub_point 4. Check with your parent
9244 block 0 < scrub_point 5. Check with your parent
9245 block 1 < scrub_point 5. Check with your parent
9246 block 2 < scrub_point 5. Check with your parent
9247 block 0 < scrub_point 0. Check with your parent
9248 block 1 < scrub_point 0. Check with your parent
9249 block 0 < scrub_point 1. Check with your parent
9250 block 1 < scrub_point 1. Check with your parent
9251 block 0 < scrub_point 2. Check with your parent
9252 block 1 < scrub_point 2. Check with your parent
9253 block 0 < scrub_point 3. Check with your parent
9254 block 1 < scrub_point 3. Check with your parent
9255 block 0 < scrub_point 4. Check with your parent
9256 block 1 < scrub_point 4. Check with your parent
9257 block 0 < scrub_point 5. Check with your parent
9258 block 1 < scrub_point 5. Check with your parent
9259 test volume::test::test_scrub_point_two_subvolume_smaller_2 ... ok
9260 block 0 < scrub_point 0. Check with your parent
9261 block 1 < scrub_point 0. Check with your parent
9262 block 2 < scrub_point 0. Check with your parent
9263 block 3 < scrub_point 0. Check with your parent
9264 block 4 < scrub_point 0. Check with your parent
9265 block 5 < scrub_point 0. Check with your parent
9266 block 6 < scrub_point 0. Check with your parent
9267 block 7 < scrub_point 0. Check with your parent
9268 block 8 > parent 8. Go to SubVolume
9269 block 9 > parent 8. Go to SubVolume
9270 block 0+1 <= scrub_point 1. No parent check
9271 block 1 < scrub_point 1. Check with your parent
9272 block 2 < scrub_point 1. Check with your parent
9273 block 3 < scrub_point 1. Check with your parent
9274 block 4 < scrub_point 1. Check with your parent
9275 block 5 < scrub_point 1. Check with your parent
9276 block 6 < scrub_point 1. Check with your parent
9277 block 7 < scrub_point 1. Check with your parent
9278 block 8 > parent 8. Go to SubVolume
9279 block 9 > parent 8. Go to SubVolume
9280 block 0+1 <= scrub_point 2. No parent check
9281 block 1+1 <= scrub_point 2. No parent check
9282 block 2 < scrub_point 2. Check with your parent
9283 block 3 < scrub_point 2. Check with your parent
9284 block 4 < scrub_point 2. Check with your parent
9285 block 5 < scrub_point 2. Check with your parent
9286 block 6 < scrub_point 2. Check with your parent
9287 block 7 < scrub_point 2. Check with your parent
9288 block 8 > parent 8. Go to SubVolume
9289 block 9 > parent 8. Go to SubVolume
9290 block 0+1 <= scrub_point 3. No parent check
9291 block 1+1 <= scrub_point 3. No parent check
9292 block 2+1 <= scrub_point 3. No parent check
9293 block 3 < scrub_point 3. Check with your parent
9294 block 4 < scrub_point 3. Check with your parent
9295 block 5 < scrub_point 3. Check with your parent
9296 block 6 < scrub_point 3. Check with your parent
9297 block 7 < scrub_point 3. Check with your parent
9298 block 8 > parent 8. Go to SubVolume
9299 block 9 > parent 8. Go to SubVolume
9300 block 0+1 <= scrub_point 4. No parent check
9301 block 1+1 <= scrub_point 4. No parent check
9302 block 2+1 <= scrub_point 4. No parent check
9303 block 3+1 <= scrub_point 4. No parent check
9304 block 4 < scrub_point 4. Check with your parent
9305 block 5 < scrub_point 4. Check with your parent
9306 block 6 < scrub_point 4. Check with your parent
9307 block 7 < scrub_point 4. Check with your parent
9308 block 8 > parent 8. Go to SubVolume
9309 block 9 > parent 8. Go to SubVolume
9310 block 0+1 <= scrub_point 5. No parent check
9311 block 1+1 <= scrub_point 5. No parent check
9312 block 2+1 <= scrub_point 5. No parent check
9313 block 3+1 <= scrub_point 5. No parent check
9314 block 4+1 <= scrub_point 5. No parent check
9315 block 5 < scrub_point 5. Check with your parent
9316 block 6 < scrub_point 5. Check with your parent
9317 block 7 < scrub_point 5. Check with your parent
9318 block 8 > parent 8. Go to SubVolume
9319 block 9 > parent 8. Go to SubVolume
9320 block 0+1 <= scrub_point 6. No parent check
9321 block 1+1 <= scrub_point 6. No parent check
9322 block 2+1 <= scrub_point 6. No parent check
9323 block 3+1 <= scrub_point 6. No parent check
9324 block 4+1 <= scrub_point 6. No parent check
9325 block 5+1 <= scrub_point 6. No parent check
9326 block 6 < scrub_point 6. Check with your parent
9327 block 7 < scrub_point 6. Check with your parent
9328 block 8 > parent 8. Go to SubVolume
9329 block 9 > parent 8. Go to SubVolume
9330 block 0+1 <= scrub_point 7. No parent check
9331 block 1+1 <= scrub_point 7. No parent check
9332 block 2+1 <= scrub_point 7. No parent check
9333 block 3+1 <= scrub_point 7. No parent check
9334 block 4+1 <= scrub_point 7. No parent check
9335 block 5+1 <= scrub_point 7. No parent check
9336 block 6+1 <= scrub_point 7. No parent check
9337 block 7 < scrub_point 7. Check with your parent
9338 block 8 > parent 8. Go to SubVolume
9339 block 9 > parent 8. Go to SubVolume
9340 block 0+1 <= scrub_point 8. No parent check
9341 block 1+1 <= scrub_point 8. No parent check
9342 block 2+1 <= scrub_point 8. No parent check
9343 block 3+1 <= scrub_point 8. No parent check
9344 block 4+1 <= scrub_point 8. No parent check
9345 block 5+1 <= scrub_point 8. No parent check
9346 block 6+1 <= scrub_point 8. No parent check
9347 block 7+1 <= scrub_point 8. No parent check
9348 block 8 > parent 8. Go to SubVolume
9349 block 9 > parent 8. Go to SubVolume
9350 block 0 < scrub_point 0. Check with your parent
9351 block 1 < scrub_point 0. Check with your parent
9352 block 2 < scrub_point 0. Check with your parent
9353 block 3 < scrub_point 0. Check with your parent
9354 block 4 < scrub_point 0. Check with your parent
9355 block 5 < scrub_point 0. Check with your parent
9356 block 6 < scrub_point 0. Check with your parent
9357 block 7 < scrub_point 0. Check with your parent
9358 block 8 > parent 8. Go to SubVolume
9359 block 0 < scrub_point 1. Check with your parent
9360 block 1 < scrub_point 1. Check with your parent
9361 block 2 < scrub_point 1. Check with your parent
9362 block 3 < scrub_point 1. Check with your parent
9363 block 4 < scrub_point 1. Check with your parent
9364 block 5 < scrub_point 1. Check with your parent
9365 block 6 < scrub_point 1. Check with your parent
9366 block 7 < scrub_point 1. Check with your parent
9367 block 8 > parent 8. Go to SubVolume
9368 block 0+2 <= scrub_point 2. No parent check
9369 block 1 < scrub_point 2. Check with your parent
9370 block 2 < scrub_point 2. Check with your parent
9371 block 3 < scrub_point 2. Check with your parent
9372 block 4 < scrub_point 2. Check with your parent
9373 block 5 < scrub_point 2. Check with your parent
9374 block 6 < scrub_point 2. Check with your parent
9375 block 7 < scrub_point 2. Check with your parent
9376 block 8 > parent 8. Go to SubVolume
9377 block 0+2 <= scrub_point 3. No parent check
9378 block 1+2 <= scrub_point 3. No parent check
9379 block 2 < scrub_point 3. Check with your parent
9380 block 3 < scrub_point 3. Check with your parent
9381 block 4 < scrub_point 3. Check with your parent
9382 block 5 < scrub_point 3. Check with your parent
9383 block 6 < scrub_point 3. Check with your parent
9384 block 7 < scrub_point 3. Check with your parent
9385 block 8 > parent 8. Go to SubVolume
9386 block 0+2 <= scrub_point 4. No parent check
9387 block 1+2 <= scrub_point 4. No parent check
9388 block 2+2 <= scrub_point 4. No parent check
9389 block 3 < scrub_point 4. Check with your parent
9390 block 4 < scrub_point 4. Check with your parent
9391 block 5 < scrub_point 4. Check with your parent
9392 block 6 < scrub_point 4. Check with your parent
9393 block 7 < scrub_point 4. Check with your parent
9394 block 8 > parent 8. Go to SubVolume
9395 block 0+2 <= scrub_point 5. No parent check
9396 block 1+2 <= scrub_point 5. No parent check
9397 block 2+2 <= scrub_point 5. No parent check
9398 block 3+2 <= scrub_point 5. No parent check
9399 block 4 < scrub_point 5. Check with your parent
9400 block 5 < scrub_point 5. Check with your parent
9401 block 6 < scrub_point 5. Check with your parent
9402 block 7 < scrub_point 5. Check with your parent
9403 block 8 > parent 8. Go to SubVolume
9404 block 0+2 <= scrub_point 6. No parent check
9405 block 1+2 <= scrub_point 6. No parent check
9406 block 2+2 <= scrub_point 6. No parent check
9407 block 3+2 <= scrub_point 6. No parent check
9408 block 4+2 <= scrub_point 6. No parent check
9409 block 5 < scrub_point 6. Check with your parent
9410 block 6 < scrub_point 6. Check with your parent
9411 block 7 < scrub_point 6. Check with your parent
9412 block 8 > parent 8. Go to SubVolume
9413 block 0+2 <= scrub_point 7. No parent check
9414 block 1+2 <= scrub_point 7. No parent check
9415 block 2+2 <= scrub_point 7. No parent check
9416 block 3+2 <= scrub_point 7. No parent check
9417 block 4+2 <= scrub_point 7. No parent check
9418 block 5+2 <= scrub_point 7. No parent check
9419 block 6 < scrub_point 7. Check with your parent
9420 block 7 < scrub_point 7. Check with your parent
9421 block 8 > parent 8. Go to SubVolume
9422 block 0+2 <= scrub_point 8. No parent check
9423 block 1+2 <= scrub_point 8. No parent check
9424 block 2+2 <= scrub_point 8. No parent check
9425 block 3+2 <= scrub_point 8. No parent check
9426 block 4+2 <= scrub_point 8. No parent check
9427 block 5+2 <= scrub_point 8. No parent check
9428 block 6+2 <= scrub_point 8. No parent check
9429 block 7 < scrub_point 8. Check with your parent
9430 block 8 > parent 8. Go to SubVolume
9431 block 0 < scrub_point 0. Check with your parent
9432 block 1 < scrub_point 0. Check with your parent
9433 block 2 < scrub_point 0. Check with your parent
9434 block 3 < scrub_point 0. Check with your parent
9435 block 4 < scrub_point 0. Check with your parent
9436 block 5 < scrub_point 0. Check with your parent
9437 block 6 < scrub_point 0. Check with your parent
9438 block 7 < scrub_point 0. Check with your parent
9439 block 0 < scrub_point 1. Check with your parent
9440 block 1 < scrub_point 1. Check with your parent
9441 block 2 < scrub_point 1. Check with your parent
9442 block 3 < scrub_point 1. Check with your parent
9443 block 4 < scrub_point 1. Check with your parent
9444 block 5 < scrub_point 1. Check with your parent
9445 block 6 < scrub_point 1. Check with your parent
9446 block 7 < scrub_point 1. Check with your parent
9447 block 0 < scrub_point 2. Check with your parent
9448 block 1 < scrub_point 2. Check with your parent
9449 block 2 < scrub_point 2. Check with your parent
9450 block 3 < scrub_point 2. Check with your parent
9451 block 4 < scrub_point 2. Check with your parent
9452 block 5 < scrub_point 2. Check with your parent
9453 block 6 < scrub_point 2. Check with your parent
9454 block 7 < scrub_point 2. Check with your parent
9455 block 0+3 <= scrub_point 3. No parent check
9456 block 1 < scrub_point 3. Check with your parent
9457 block 2 < scrub_point 3. Check with your parent
9458 block 3 < scrub_point 3. Check with your parent
9459 block 4 < scrub_point 3. Check with your parent
9460 block 5 < scrub_point 3. Check with your parent
9461 block 6 < scrub_point 3. Check with your parent
9462 block 7 < scrub_point 3. Check with your parent
9463 block 0+3 <= scrub_point 4. No parent check
9464 block 1+3 <= scrub_point 4. No parent check
9465 block 2 < scrub_point 4. Check with your parent
9466 block 3 < scrub_point 4. Check with your parent
9467 block 4 < scrub_point 4. Check with your parent
9468 block 5 < scrub_point 4. Check with your parent
9469 block 6 < scrub_point 4. Check with your parent
9470 block 7 < scrub_point 4. Check with your parent
9471 block 0+3 <= scrub_point 5. No parent check
9472 block 1+3 <= scrub_point 5. No parent check
9473 block 2+3 <= scrub_point 5. No parent check
9474 block 3 < scrub_point 5. Check with your parent
9475 block 4 < scrub_point 5. Check with your parent
9476 block 5 < scrub_point 5. Check with your parent
9477 block 6 < scrub_point 5. Check with your parent
9478 block 7 < scrub_point 5. Check with your parent
9479 block 0+3 <= scrub_point 6. No parent check
9480 block 1+3 <= scrub_point 6. No parent check
9481 block 2+3 <= scrub_point 6. No parent check
9482 block 3+3 <= scrub_point 6. No parent check
9483 block 4 < scrub_point 6. Check with your parent
9484 block 5 < scrub_point 6. Check with your parent
9485 block 6 < scrub_point 6. Check with your parent
9486 block 7 < scrub_point 6. Check with your parent
9487 block 0+3 <= scrub_point 7. No parent check
9488 block 1+3 <= scrub_point 7. No parent check
9489 block 2+3 <= scrub_point 7. No parent check
9490 block 3+3 <= scrub_point 7. No parent check
9491 block 4+3 <= scrub_point 7. No parent check
9492 block 5 < scrub_point 7. Check with your parent
9493 block 6 < scrub_point 7. Check with your parent
9494 block 7 < scrub_point 7. Check with your parent
9495 block 0+3 <= scrub_point 8. No parent check
9496 block 1+3 <= scrub_point 8. No parent check
9497 block 2+3 <= scrub_point 8. No parent check
9498 block 3+3 <= scrub_point 8. No parent check
9499 block 4+3 <= scrub_point 8. No parent check
9500 block 5+3 <= scrub_point 8. No parent check
9501 block 6 < scrub_point 8. Check with your parent
9502 block 7 < scrub_point 8. Check with your parent
9503 block 0 < scrub_point 0. Check with your parent
9504 block 1 < scrub_point 0. Check with your parent
9505 block 2 < scrub_point 0. Check with your parent
9506 block 3 < scrub_point 0. Check with your parent
9507 block 4 < scrub_point 0. Check with your parent
9508 block 5 < scrub_point 0. Check with your parent
9509 block 6 < scrub_point 0. Check with your parent
9510 block 0 < scrub_point 1. Check with your parent
9511 block 1 < scrub_point 1. Check with your parent
9512 block 2 < scrub_point 1. Check with your parent
9513 block 3 < scrub_point 1. Check with your parent
9514 block 4 < scrub_point 1. Check with your parent
9515 block 5 < scrub_point 1. Check with your parent
9516 block 6 < scrub_point 1. Check with your parent
9517 block 0 < scrub_point 2. Check with your parent
9518 block 1 < scrub_point 2. Check with your parent
9519 block 2 < scrub_point 2. Check with your parent
9520 block 3 < scrub_point 2. Check with your parent
9521 block 4 < scrub_point 2. Check with your parent
9522 block 5 < scrub_point 2. Check with your parent
9523 block 6 < scrub_point 2. Check with your parent
9524 block 0 < scrub_point 3. Check with your parent
9525 block 1 < scrub_point 3. Check with your parent
9526 block 2 < scrub_point 3. Check with your parent
9527 block 3 < scrub_point 3. Check with your parent
9528 block 4 < scrub_point 3. Check with your parent
9529 block 5 < scrub_point 3. Check with your parent
9530 block 6 < scrub_point 3. Check with your parent
9531 block 0+4 <= scrub_point 4. No parent check
9532 block 1 < scrub_point 4. Check with your parent
9533 block 2 < scrub_point 4. Check with your parent
9534 block 3 < scrub_point 4. Check with your parent
9535 block 4 < scrub_point 4. Check with your parent
9536 block 5 < scrub_point 4. Check with your parent
9537 block 6 < scrub_point 4. Check with your parent
9538 block 0+4 <= scrub_point 5. No parent check
9539 block 1+4 <= scrub_point 5. No parent check
9540 block 2 < scrub_point 5. Check with your parent
9541 block 3 < scrub_point 5. Check with your parent
9542 block 4 < scrub_point 5. Check with your parent
9543 block 5 < scrub_point 5. Check with your parent
9544 block 6 < scrub_point 5. Check with your parent
9545 block 0+4 <= scrub_point 6. No parent check
9546 block 1+4 <= scrub_point 6. No parent check
9547 block 2+4 <= scrub_point 6. No parent check
9548 block 3 < scrub_point 6. Check with your parent
9549 block 4 < scrub_point 6. Check with your parent
9550 block 5 < scrub_point 6. Check with your parent
9551 block 6 < scrub_point 6. Check with your parent
9552 block 0+4 <= scrub_point 7. No parent check
9553 block 1+4 <= scrub_point 7. No parent check
9554 block 2+4 <= scrub_point 7. No parent check
9555 block 3+4 <= scrub_point 7. No parent check
9556 block 4 < scrub_point 7. Check with your parent
9557 block 5 < scrub_point 7. Check with your parent
9558 block 6 < scrub_point 7. Check with your parent
9559 block 0+4 <= scrub_point 8. No parent check
9560 block 1+4 <= scrub_point 8. No parent check
9561 block 2+4 <= scrub_point 8. No parent check
9562 block 3+4 <= scrub_point 8. No parent check
9563 block 4+4 <= scrub_point 8. No parent check
9564 block 5 < scrub_point 8. Check with your parent
9565 block 6 < scrub_point 8. Check with your parent
9566 block 0 < scrub_point 0. Check with your parent
9567 block 1 < scrub_point 0. Check with your parent
9568 block 2 < scrub_point 0. Check with your parent
9569 block 3 < scrub_point 0. Check with your parent
9570 block 4 < scrub_point 0. Check with your parent
9571 block 5 < scrub_point 0. Check with your parent
9572 block 0 < scrub_point 1. Check with your parent
9573 block 1 < scrub_point 1. Check with your parent
9574 block 2 < scrub_point 1. Check with your parent
9575 block 3 < scrub_point 1. Check with your parent
9576 block 4 < scrub_point 1. Check with your parent
9577 block 5 < scrub_point 1. Check with your parent
9578 block 0 < scrub_point 2. Check with your parent
9579 block 1 < scrub_point 2. Check with your parent
9580 block 2 < scrub_point 2. Check with your parent
9581 block 3 < scrub_point 2. Check with your parent
9582 block 4 < scrub_point 2. Check with your parent
9583 block 5 < scrub_point 2. Check with your parent
9584 block 0 < scrub_point 3. Check with your parent
9585 block 1 < scrub_point 3. Check with your parent
9586 block 2 < scrub_point 3. Check with your parent
9587 block 3 < scrub_point 3. Check with your parent
9588 block 4 < scrub_point 3. Check with your parent
9589 block 5 < scrub_point 3. Check with your parent
9590 block 0 < scrub_point 4. Check with your parent
9591 block 1 < scrub_point 4. Check with your parent
9592 block 2 < scrub_point 4. Check with your parent
9593 block 3 < scrub_point 4. Check with your parent
9594 block 4 < scrub_point 4. Check with your parent
9595 block 5 < scrub_point 4. Check with your parent
9596 block 0+5 <= scrub_point 5. No parent check
9597 block 1 < scrub_point 5. Check with your parent
9598 block 2 < scrub_point 5. Check with your parent
9599 block 3 < scrub_point 5. Check with your parent
9600 block 4 < scrub_point 5. Check with your parent
9601 block 5 < scrub_point 5. Check with your parent
9602 block 0+5 <= scrub_point 6. No parent check
9603 block 1+5 <= scrub_point 6. No parent check
9604 block 2 < scrub_point 6. Check with your parent
9605 block 3 < scrub_point 6. Check with your parent
9606 block 4 < scrub_point 6. Check with your parent
9607 block 5 < scrub_point 6. Check with your parent
9608 block 0+5 <= scrub_point 7. No parent check
9609 block 1+5 <= scrub_point 7. No parent check
9610 block 2+5 <= scrub_point 7. No parent check
9611 block 3 < scrub_point 7. Check with your parent
9612 block 4 < scrub_point 7. Check with your parent
9613 block 5 < scrub_point 7. Check with your parent
9614 block 0+5 <= scrub_point 8. No parent check
9615 block 1+5 <= scrub_point 8. No parent check
9616 block 2+5 <= scrub_point 8. No parent check
9617 block 3+5 <= scrub_point 8. No parent check
9618 block 4 < scrub_point 8. Check with your parent
9619 block 5 < scrub_point 8. Check with your parent
9620 block 0 < scrub_point 0. Check with your parent
9621 block 1 < scrub_point 0. Check with your parent
9622 block 2 < scrub_point 0. Check with your parent
9623 block 3 < scrub_point 0. Check with your parent
9624 block 4 < scrub_point 0. Check with your parent
9625 block 0 < scrub_point 1. Check with your parent
9626 block 1 < scrub_point 1. Check with your parent
9627 block 2 < scrub_point 1. Check with your parent
9628 block 3 < scrub_point 1. Check with your parent
9629 block 4 < scrub_point 1. Check with your parent
9630 block 0 < scrub_point 2. Check with your parent
9631 block 1 < scrub_point 2. Check with your parent
9632 block 2 < scrub_point 2. Check with your parent
9633 block 3 < scrub_point 2. Check with your parent
9634 block 4 < scrub_point 2. Check with your parent
9635 block 0 < scrub_point 3. Check with your parent
9636 block 1 < scrub_point 3. Check with your parent
9637 block 2 < scrub_point 3. Check with your parent
9638 block 3 < scrub_point 3. Check with your parent
9639 block 4 < scrub_point 3. Check with your parent
9640 block 0 < scrub_point 4. Check with your parent
9641 block 1 < scrub_point 4. Check with your parent
9642 block 2 < scrub_point 4. Check with your parent
9643 block 3 < scrub_point 4. Check with your parent
9644 block 4 < scrub_point 4. Check with your parent
9645 block 0 < scrub_point 5. Check with your parent
9646 block 1 < scrub_point 5. Check with your parent
9647 block 2 < scrub_point 5. Check with your parent
9648 block 3 < scrub_point 5. Check with your parent
9649 block 4 < scrub_point 5. Check with your parent
9650 block 0+6 <= scrub_point 6. No parent check
9651 block 1 < scrub_point 6. Check with your parent
9652 block 2 < scrub_point 6. Check with your parent
9653 block 3 < scrub_point 6. Check with your parent
9654 block 4 < scrub_point 6. Check with your parent
9655 block 0+6 <= scrub_point 7. No parent check
9656 block 1+6 <= scrub_point 7. No parent check
9657 block 2 < scrub_point 7. Check with your parent
9658 block 3 < scrub_point 7. Check with your parent
9659 block 4 < scrub_point 7. Check with your parent
9660 block 0+6 <= scrub_point 8. No parent check
9661 block 1+6 <= scrub_point 8. No parent check
9662 block 2+6 <= scrub_point 8. No parent check
9663 block 3 < scrub_point 8. Check with your parent
9664 block 4 < scrub_point 8. Check with your parent
9665 block 0 < scrub_point 0. Check with your parent
9666 block 1 < scrub_point 0. Check with your parent
9667 block 2 < scrub_point 0. Check with your parent
9668 block 3 < scrub_point 0. Check with your parent
9669 block 0 < scrub_point 1. Check with your parent
9670 block 1 < scrub_point 1. Check with your parent
9671 block 2 < scrub_point 1. Check with your parent
9672 block 3 < scrub_point 1. Check with your parent
9673 block 0 < scrub_point 2. Check with your parent
9674 block 1 < scrub_point 2. Check with your parent
9675 block 2 < scrub_point 2. Check with your parent
9676 block 3 < scrub_point 2. Check with your parent
9677 block 0 < scrub_point 3. Check with your parent
9678 block 1 < scrub_point 3. Check with your parent
9679 block 2 < scrub_point 3. Check with your parent
9680 block 3 < scrub_point 3. Check with your parent
9681 block 0 < scrub_point 4. Check with your parent
9682 block 1 < scrub_point 4. Check with your parent
9683 block 2 < scrub_point 4. Check with your parent
9684 block 3 < scrub_point 4. Check with your parent
9685 block 0 < scrub_point 5. Check with your parent
9686 block 1 < scrub_point 5. Check with your parent
9687 block 2 < scrub_point 5. Check with your parent
9688 block 3 < scrub_point 5. Check with your parent
9689 block 0 < scrub_point 6. Check with your parent
9690 block 1 < scrub_point 6. Check with your parent
9691 block 2 < scrub_point 6. Check with your parent
9692 block 3 < scrub_point 6. Check with your parent
9693 block 0+7 <= scrub_point 7. No parent check
9694 block 1 < scrub_point 7. Check with your parent
9695 block 2 < scrub_point 7. Check with your parent
9696 block 3 < scrub_point 7. Check with your parent
9697 block 0+7 <= scrub_point 8. No parent check
9698 block 1+7 <= scrub_point 8. No parent check
9699 block 2 < scrub_point 8. Check with your parent
9700 block 3 < scrub_point 8. Check with your parent
9701 block 0 < scrub_point 0. Check with your parent
9702 block 1 < scrub_point 0. Check with your parent
9703 block 2 < scrub_point 0. Check with your parent
9704 block 0 < scrub_point 1. Check with your parent
9705 block 1 < scrub_point 1. Check with your parent
9706 block 2 < scrub_point 1. Check with your parent
9707 block 0 < scrub_point 2. Check with your parent
9708 block 1 < scrub_point 2. Check with your parent
9709 block 2 < scrub_point 2. Check with your parent
9710 block 0 < scrub_point 3. Check with your parent
9711 block 1 < scrub_point 3. Check with your parent
9712 block 2 < scrub_point 3. Check with your parent
9713 block 0 < scrub_point 4. Check with your parent
9714 block 1 < scrub_point 4. Check with your parent
9715 block 2 < scrub_point 4. Check with your parent
9716 block 0 < scrub_point 5. Check with your parent
9717 block 1 < scrub_point 5. Check with your parent
9718 block 2 < scrub_point 5. Check with your parent
9719 block 0 < scrub_point 6. Check with your parent
9720 block 1 < scrub_point 6. Check with your parent
9721 block 2 < scrub_point 6. Check with your parent
9722 block 0 < scrub_point 7. Check with your parent
9723 block 1 < scrub_point 7. Check with your parent
9724 block 2 < scrub_point 7. Check with your parent
9725 block 0+8 <= scrub_point 8. No parent check
9726 block 1 < scrub_point 8. Check with your parent
9727 block 2 < scrub_point 8. Check with your parent
9728 block 0 < scrub_point 0. Check with your parent
9729 block 1 < scrub_point 0. Check with your parent
9730 block 0 < scrub_point 1. Check with your parent
9731 block 1 < scrub_point 1. Check with your parent
9732 block 0 < scrub_point 2. Check with your parent
9733 block 1 < scrub_point 2. Check with your parent
9734 block 0 < scrub_point 3. Check with your parent
9735 block 1 < scrub_point 3. Check with your parent
9736 block 0 < scrub_point 4. Check with your parent
9737 block 1 < scrub_point 4. Check with your parent
9738 block 0 < scrub_point 5. Check with your parent
9739 block 1 < scrub_point 5. Check with your parent
9740 block 0 < scrub_point 6. Check with your parent
9741 block 1 < scrub_point 6. Check with your parent
9742 block 0 < scrub_point 7. Check with your parent
9743 block 1 < scrub_point 7. Check with your parent
9744 block 0 < scrub_point 8. Check with your parent
9745 block 1 < scrub_point 8. Check with your parent
9746 test volume::test::test_scrub_point_two_subvolume_smaller_3 ... ok
9747 test volume::test::test_single_block ... ok
9748 test volume::test::test_single_sub_volume_lba_coverage ... ok
9749 test volume::test::test_single_sub_volume_lba_coverage_with_offset ... ok
9750 test volume::test::test_three_layers ... ok
9751 test volume::test::test_volume_size ... ok
9752 test volume::test::test_volume_with_only_read_only_parent ... ok
9753 test volume::test::test_write_unwritten_to_volume_with_only_read_only_parent ... ok
9754 test volume::test::test_writing_to_volume_with_only_read_only_parent ... ok
9755 Sep 22 23:15:12.342 INFO Test replacement of CID 0
9756 Sep 22 23:15:12.342 INFO replace 127.0.0.1:5555 with 127.0.0.1:8888
9757 Sep 22 23:15:12.342 INFO Test replacement of CID 1
9758 Sep 22 23:15:12.342 INFO replace 127.0.0.1:6666 with 127.0.0.1:8888
9759 Sep 22 23:15:12.342 INFO Test replacement of CID 2
9760 Sep 22 23:15:12.342 INFO replace 127.0.0.1:7777 with 127.0.0.1:8888
9761 test volume::test::volume_replace_basic ... ok
9762 test volume::test::volume_replace_drop_rop ... ok
9763 test volume::test::volume_replace_mismatch_opts_cert_pem ... ok
9764 test volume::test::volume_replace_mismatch_opts_control ... ok
9765 test volume::test::volume_replace_mismatch_opts_flush_timeout ... ok
9766 test volume::test::volume_replace_mismatch_opts_id ... ok
9767 test volume::test::volume_replace_mismatch_opts_key ... ok
9768 test volume::test::volume_replace_mismatch_opts_key_pem ... ok
9769 test volume::test::volume_replace_mismatch_opts_lossy ... ok
9770 test volume::test::volume_replace_mismatch_opts_read_only ... ok
9771 test volume::test::volume_replace_mismatch_opts_root_cert ... ok
9772 test volume::test::volume_replace_mismatch_sv_bpe ... ok
9773 test volume::test::volume_replace_mismatch_sv_bs ... ok
9774 test volume::test::volume_replace_mismatch_sv_ec ... ok
9775 test volume::test::volume_replace_mismatch_vblock ... ok
9776 test volume::test::volume_replace_mismatch_vid ... ok
9777 test volume::test::volume_replace_mismatch_vrop ... ok
9778 test volume::test::volume_replace_rop ... ok
9779 test volume::test::volume_replace_self ... ok
9780 test volume::test::volume_vcr_no_target ... ok
97812023-09-22T23:15:12.597ZINFOcrucible: Waiting for 3 jobs (currently 2)
97822023-09-22T23:15:12.597ZINFOcrucible: No repair needed for extent 0 = downstairs
97832023-09-22T23:15:12.597ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
97842023-09-22T23:15:12.731ZINFOcrucible: Waiting for 4 jobs (currently 3)
97852023-09-22T23:15:12.731ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
97862023-09-22T23:15:12.731ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
97872023-09-22T23:15:12.734ZINFOcrucible: Waiting for 4 jobs (currently 3)
97882023-09-22T23:15:12.734ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
97892023-09-22T23:15:12.859ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
97902023-09-22T23:15:12.859ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
97912023-09-22T23:15:12.859ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
97922023-09-22T23:15:12.859ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
97932023-09-22T23:15:12.859ZINFOcrucible: [0] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Active LiveRepair Active ds_transition to Faulted
97942023-09-22T23:15:12.859ZINFOcrucible: [0] Transition from Active to Faulted
97952023-09-22T23:15:12.859ZINFOcrucible: Waiting for 4 jobs (currently 3)
97962023-09-22T23:15:12.859ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
97972023-09-22T23:15:12.859ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
97982023-09-22T23:15:12.859ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
97992023-09-22T23:15:12.859ZINFOcrucible: [1] b256d998-7399-488e-9e7c-fec98f8e0755 (87e8b462-0c21-434d-84d7-49817a811b80) Faulted LiveRepair Active ds_transition to Faulted
98002023-09-22T23:15:12.859ZINFOcrucible: [1] Transition from LiveRepair to Faulted
98012023-09-22T23:15:12.859ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
98022023-09-22T23:15:13.598ZINFOcrucible: Waiting for 4 jobs (currently 3)
98032023-09-22T23:15:13.598ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
98042023-09-22T23:15:13.726ZINFOcrucible: Finally, move the ReOpen job forward
98052023-09-22T23:15:13.726ZINFOcrucible: Now ACK the reopen job
98062023-09-22T23:15:13.726ZWARNcrucible: RE:0 Bailing with error
98072023-09-22T23:15:13.726ZINFOcrucible: err:2 or:0
98082023-09-22T23:15:13.727ZINFOcrucible: Crucible stats registered with UUID: f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b
98092023-09-22T23:15:13.727ZINFOcrucible: Crucible f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b has session id: e6f4a709-5395-4d56-8e67-91d7f07e4ff7
98102023-09-22T23:15:13.727ZINFOcrucible: [0] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) New New New ds_transition to WaitActive
98112023-09-22T23:15:13.727ZINFOcrucible: [0] Transition from New to WaitActive
98122023-09-22T23:15:13.727ZINFOcrucible: [0] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) WaitActive New New ds_transition to WaitQuorum
98132023-09-22T23:15:13.727ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
98142023-09-22T23:15:13.727ZINFOcrucible: [0] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) WaitQuorum New New ds_transition to Active
98152023-09-22T23:15:13.727ZINFOcrucible: [0] Transition from WaitQuorum to Active
98162023-09-22T23:15:13.727ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active New New ds_transition to WaitActive
98172023-09-22T23:15:13.727ZINFOcrucible: [1] Transition from New to WaitActive
98182023-09-22T23:15:13.727ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active WaitActive New ds_transition to WaitQuorum
98192023-09-22T23:15:13.727ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
98202023-09-22T23:15:13.727ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active WaitQuorum New ds_transition to Active
98212023-09-22T23:15:13.727ZINFOcrucible: [1] Transition from WaitQuorum to Active
98222023-09-22T23:15:13.727ZINFOcrucible: [2] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active Active New ds_transition to WaitActive
98232023-09-22T23:15:13.727ZINFOcrucible: [2] Transition from New to WaitActive
98242023-09-22T23:15:13.727ZINFOcrucible: [2] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active Active WaitActive ds_transition to WaitQuorum
98252023-09-22T23:15:13.727ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
98262023-09-22T23:15:13.727ZINFOcrucible: [2] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active Active WaitQuorum ds_transition to Active
98272023-09-22T23:15:13.727ZINFOcrucible: [2] Transition from WaitQuorum to Active
98282023-09-22T23:15:13.727ZINFOcrucible: f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b is now active with session: ff123089-f6f2-4dc3-8c57-434e173b3d67
98292023-09-22T23:15:13.727ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active Active Active ds_transition to Faulted
98302023-09-22T23:15:13.727ZINFOcrucible: [1] Transition from Active to Faulted
98312023-09-22T23:15:13.727ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active Faulted Active ds_transition to LiveRepairReady
98322023-09-22T23:15:13.727ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
98332023-09-22T23:15:13.727ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active LiveRepairReady Active ds_transition to LiveRepair
98342023-09-22T23:15:13.727ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
98352023-09-22T23:15:13.727ZINFOcrucible: Waiting for Close + ReOpen jobs
98362023-09-22T23:15:13.728ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
98372023-09-22T23:15:13.728ZINFOcrucible: RE:0 close id:1000 queued, notify DS
98382023-09-22T23:15:13.728ZINFOcrucible: RE:0 Wait for result from close command 1000:1
98392023-09-22T23:15:13.732ZINFOcrucible: Now move the NoOp job forward
98402023-09-22T23:15:13.732ZINFOcrucible: Now ACK the NoOp job
98412023-09-22T23:15:13.732ZINFOcrucible: Finally, move the ReOpen job forward
98422023-09-22T23:15:13.732ZINFOcrucible: Now ACK the Reopen job
98432023-09-22T23:15:13.732ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
98442023-09-22T23:15:13.732ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
9845 {"msg":"Extent 0 close id:1003 Failed: Error: bad","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:15:13.733031228Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759----------------------------------------------------------------
9846 }
9847 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
9848 {"msg":GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
9849 "RE:0 Bailing with error","v":0,"name":"crucible"," 1 Acked 1000 FClose 0level":40 Done Err Done false
9850 2 Acked 1001 NoOp 0 Done Skip Done false
9851 3 Acked 1002 NoOp 0," Donetime Skip" Done: false"
9852 2023-09-22T23:15:13.733079694Z", 4 Acked 1003 Reopen 0" Done Skiphostname Done" false:
9853 " STATES DS:0 DS:1 DS:2 TOTAL
9854 ip-10-150-1-55.us-west-2.compute.internal" New ,"pid 0 ": 0 4759 0 } 0
9855 
9856 Sent 0 0 0 0
9857 Done 4 0 4 8
9858 Skipped 0 3 0 3
9859 Error 0 1 0 1
9860 Last Flush: 0 0 0
9861 Downstairs last five completed:
9862 Upstairs last five completed: 4 3 2 1
98632023-09-22T23:15:13.733ZINFOcrucible: Crucible stats registered with UUID: 342ffdcc-1be9-41e2-9df4-d887f80de943
98642023-09-22T23:15:13.733ZINFOcrucible: Crucible 342ffdcc-1be9-41e2-9df4-d887f80de943 has session id: 3681e62b-cf90-4959-8dd8-3b7e79ddb73f
98652023-09-22T23:15:13.733ZINFOcrucible: [0] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) New New New ds_transition to WaitActive
98662023-09-22T23:15:13.733ZINFOcrucible: [0] Transition from New to WaitActive
98672023-09-22T23:15:13.733ZINFOcrucible: [0] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) WaitActive New New ds_transition to WaitQuorum
98682023-09-22T23:15:13.733ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
98692023-09-22T23:15:13.733ZINFOcrucible: [0] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) WaitQuorum New New ds_transition to Active
98702023-09-22T23:15:13.733ZINFOcrucible: [0] Transition from WaitQuorum to Active
98712023-09-22T23:15:13.733ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active New New ds_transition to WaitActive
98722023-09-22T23:15:13.733ZINFOcrucible: [1] Transition from New to WaitActive
98732023-09-22T23:15:13.733ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active WaitActive New ds_transition to WaitQuorum
98742023-09-22T23:15:13.733ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
98752023-09-22T23:15:13.733ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active WaitQuorum New ds_transition to Active
98762023-09-22T23:15:13.733ZINFOcrucible: [1] Transition from WaitQuorum to Active
98772023-09-22T23:15:13.733ZINFOcrucible: [2] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active Active New ds_transition to WaitActive
98782023-09-22T23:15:13.733ZINFOcrucible: [2] Transition from New to WaitActive
98792023-09-22T23:15:13.733ZINFOcrucible: [2] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active Active WaitActive ds_transition to WaitQuorum
98802023-09-22T23:15:13.733ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
98812023-09-22T23:15:13.733ZINFOcrucible: [2] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active Active WaitQuorum ds_transition to Active
98822023-09-22T23:15:13.733ZINFOcrucible: [2] Transition from WaitQuorum to Active
98832023-09-22T23:15:13.733ZINFOcrucible: 342ffdcc-1be9-41e2-9df4-d887f80de943 is now active with session: 36f75453-9462-4780-abce-7f7f3c31d228
98842023-09-22T23:15:13.733ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active Active Active ds_transition to Faulted
98852023-09-22T23:15:13.733ZINFOcrucible: [1] Transition from Active to Faulted
98862023-09-22T23:15:13.733ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active Faulted Active ds_transition to LiveRepairReady
98872023-09-22T23:15:13.734ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
98882023-09-22T23:15:13.734ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active LiveRepairReady Active ds_transition to LiveRepair
98892023-09-22T23:15:13.734ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
98902023-09-22T23:15:13.734ZINFOcrucible: Waiting for Close + ReOpen jobs
98912023-09-22T23:15:13.734ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
98922023-09-22T23:15:13.734ZINFOcrucible: RE:0 close id:1000 queued, notify DS
98932023-09-22T23:15:13.734ZINFOcrucible: RE:0 Wait for result from close command 1000:1
98942023-09-22T23:15:13.735ZINFOcrucible: Now move the NoOp job forward
98952023-09-22T23:15:13.735ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
98962023-09-22T23:15:13.735ZERROcrucible: [1] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
98972023-09-22T23:15:13.735ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
98982023-09-22T23:15:13.735ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
98992023-09-22T23:15:13.735ZINFOcrucible: [1] e2730307-dcf6-4644-b256-08168ac114e1 (8d751494-4fad-49f3-9d80-50dcfbbc3677) Active LiveRepair Active ds_transition to Faulted
99002023-09-22T23:15:13.735ZINFOcrucible: [1] Transition from LiveRepair to Faulted
99012023-09-22T23:15:13.735ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
99022023-09-22T23:15:13.735ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
99032023-09-22T23:15:13.735ZWARNcrucible: RE:0 Bailing with error
99042023-09-22T23:15:13.736ZINFOcrucible: Crucible stats registered with UUID: 56fedc7f-6649-4d9c-8d0a-ac22f65da705
99052023-09-22T23:15:13.736ZINFOcrucible: Crucible 56fedc7f-6649-4d9c-8d0a-ac22f65da705 has session id: 53c11a87-8b8f-4c6f-84ef-36f22a19d7c6
99062023-09-22T23:15:13.736ZINFOcrucible: [0] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) New New New ds_transition to WaitActive
99072023-09-22T23:15:13.736ZINFOcrucible: [0] Transition from New to WaitActive
99082023-09-22T23:15:13.736ZINFOcrucible: [0] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) WaitActive New New ds_transition to WaitQuorum
99092023-09-22T23:15:13.736ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
99102023-09-22T23:15:13.736ZINFOcrucible: [0] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) WaitQuorum New New ds_transition to Active
99112023-09-22T23:15:13.736ZINFOcrucible: [0] Transition from WaitQuorum to Active
99122023-09-22T23:15:13.736ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active New New ds_transition to WaitActive
99132023-09-22T23:15:13.736ZINFOcrucible: [1] Transition from New to WaitActive
99142023-09-22T23:15:13.736ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active WaitActive New ds_transition to WaitQuorum
99152023-09-22T23:15:13.736ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
99162023-09-22T23:15:13.736ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active WaitQuorum New ds_transition to Active
99172023-09-22T23:15:13.736ZINFOcrucible: [1] Transition from WaitQuorum to Active
99182023-09-22T23:15:13.736ZINFOcrucible: [2] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active Active New ds_transition to WaitActive
99192023-09-22T23:15:13.736ZINFOcrucible: [2] Transition from New to WaitActive
99202023-09-22T23:15:13.736ZINFOcrucible: [2] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active Active WaitActive ds_transition to WaitQuorum
99212023-09-22T23:15:13.736ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
99222023-09-22T23:15:13.736ZINFOcrucible: [2] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active Active WaitQuorum ds_transition to Active
99232023-09-22T23:15:13.736ZINFOcrucible: [2] Transition from WaitQuorum to Active
99242023-09-22T23:15:13.736ZINFOcrucible: 56fedc7f-6649-4d9c-8d0a-ac22f65da705 is now active with session: 38051f6c-52ee-44a3-80ec-3f0d3c3a61b5
99252023-09-22T23:15:13.736ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active Active Active ds_transition to Faulted
99262023-09-22T23:15:13.736ZINFOcrucible: [1] Transition from Active to Faulted
99272023-09-22T23:15:13.736ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active Faulted Active ds_transition to LiveRepairReady
99282023-09-22T23:15:13.736ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
99292023-09-22T23:15:13.736ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active LiveRepairReady Active ds_transition to LiveRepair
99302023-09-22T23:15:13.736ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
99312023-09-22T23:15:13.736ZINFOcrucible: Waiting for Close + ReOpen jobs
99322023-09-22T23:15:13.736ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
99332023-09-22T23:15:13.736ZINFOcrucible: RE:0 close id:1000 queued, notify DS
99342023-09-22T23:15:13.736ZINFOcrucible: RE:0 Wait for result from close command 1000:1
99352023-09-22T23:15:13.860ZINFOcrucible: Now move the NoOp job forward
99362023-09-22T23:15:13.860ZINFOcrucible: Now ACK the NoOp job
99372023-09-22T23:15:13.860ZINFOcrucible: Finally, move the ReOpen job forward
99382023-09-22T23:15:13.860ZINFOcrucible: Now ACK the Reopen job
99392023-09-22T23:15:13.861ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
99402023-09-22T23:15:13.861ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
99412023-09-22T23:15:13.861ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
99422023-09-22T23:15:13.861ZWARNcrucible: RE:0 Bailing with error
99432023-09-22T23:15:13.861ZINFOcrucible: Crucible stats registered with UUID: e2a24436-8a58-49c3-b253-d7ce5f8c2715
99442023-09-22T23:15:13.861ZINFOcrucible: Crucible e2a24436-8a58-49c3-b253-d7ce5f8c2715 has session id: 36f3d398-f991-4e44-9702-12dd51e5824b
99452023-09-22T23:15:13.861ZINFOcrucible: [0] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) New New New ds_transition to WaitActive
99462023-09-22T23:15:13.861ZINFOcrucible: [0] Transition from New to WaitActive
99472023-09-22T23:15:13.861ZINFOcrucible: [0] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) WaitActive New New ds_transition to WaitQuorum
99482023-09-22T23:15:13.861ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
99492023-09-22T23:15:13.861ZINFOcrucible: [0] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) WaitQuorum New New ds_transition to Active
99502023-09-22T23:15:13.861ZINFOcrucible: [0] Transition from WaitQuorum to Active
99512023-09-22T23:15:13.861ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active New New ds_transition to WaitActive
99522023-09-22T23:15:13.861ZINFOcrucible: [1] Transition from New to WaitActive
99532023-09-22T23:15:13.861ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active WaitActive New ds_transition to WaitQuorum
99542023-09-22T23:15:13.861ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
99552023-09-22T23:15:13.861ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active WaitQuorum New ds_transition to Active
99562023-09-22T23:15:13.861ZINFOcrucible: [1] Transition from WaitQuorum to Active
99572023-09-22T23:15:13.861ZINFOcrucible: [2] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active Active New ds_transition to WaitActive
99582023-09-22T23:15:13.861ZINFOcrucible: [2] Transition from New to WaitActive
99592023-09-22T23:15:13.861ZINFOcrucible: [2] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active Active WaitActive ds_transition to WaitQuorum
99602023-09-22T23:15:13.861ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
99612023-09-22T23:15:13.861ZINFOcrucible: [2] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active Active WaitQuorum ds_transition to Active
99622023-09-22T23:15:13.861ZINFOcrucible: [2] Transition from WaitQuorum to Active
99632023-09-22T23:15:13.861ZINFOcrucible: e2a24436-8a58-49c3-b253-d7ce5f8c2715 is now active with session: 1d7b944a-0284-455a-8b0b-72a46a80d72a
99642023-09-22T23:15:13.861ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active Active Active ds_transition to Faulted
99652023-09-22T23:15:13.861ZINFOcrucible: [1] Transition from Active to Faulted
99662023-09-22T23:15:13.861ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active Faulted Active ds_transition to LiveRepairReady
99672023-09-22T23:15:13.862ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
99682023-09-22T23:15:13.862ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active LiveRepairReady Active ds_transition to LiveRepair
99692023-09-22T23:15:13.862ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
99702023-09-22T23:15:13.862ZINFOcrucible: Waiting for Close + ReOpen jobs
99712023-09-22T23:15:13.862ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
99722023-09-22T23:15:13.862ZINFOcrucible: RE:0 close id:1000 queued, notify DS
99732023-09-22T23:15:13.862ZINFOcrucible: RE:0 Wait for result from close command 1000:1
99742023-09-22T23:15:14.729ZINFOcrucible: Waiting for 3 jobs (currently 2)
99752023-09-22T23:15:14.729ZINFOcrucible: No repair needed for extent 0 = downstairs
99762023-09-22T23:15:14.729ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
99772023-09-22T23:15:14.734ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
99782023-09-22T23:15:14.734ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
99792023-09-22T23:15:14.734ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
99802023-09-22T23:15:14.734ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
99812023-09-22T23:15:14.734ZINFOcrucible: [2] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active LiveRepair Active ds_transition to Faulted
99822023-09-22T23:15:14.734ZINFOcrucible: [2] Transition from Active to Faulted
99832023-09-22T23:15:14.734ZINFOcrucible: Now ACK the close job
99842023-09-22T23:15:14.734ZINFOcrucible: Waiting for 3 jobs (currently 2)
99852023-09-22T23:15:14.734ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
99862023-09-22T23:15:14.734ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
99872023-09-22T23:15:14.734ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
99882023-09-22T23:15:14.734ZINFOcrucible: [1] 342ffdcc-1be9-41e2-9df4-d887f80de943 (36f75453-9462-4780-abce-7f7f3c31d228) Active LiveRepair Faulted ds_transition to Faulted
99892023-09-22T23:15:14.734ZINFOcrucible: [1] Transition from LiveRepair to Faulted
99902023-09-22T23:15:14.734ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
99912023-09-22T23:15:14.736ZINFOcrucible: Waiting for 3 jobs (currently 2)
99922023-09-22T23:15:14.736ZINFOcrucible: No repair needed for extent 0 = downstairs
99932023-09-22T23:15:14.736ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
99942023-09-22T23:15:14.863ZINFOcrucible: Now ACK the close job
99952023-09-22T23:15:14.863ZINFOcrucible: Waiting for 3 jobs (currently 2)
99962023-09-22T23:15:14.863ZINFOcrucible: No repair needed for extent 0 = downstairs
99972023-09-22T23:15:14.863ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
99982023-09-22T23:15:15.729ZINFOcrucible: Waiting for 4 jobs (currently 3)
99992023-09-22T23:15:15.729ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
100002023-09-22T23:15:15.735ZINFOcrucible: Waiting for 4 jobs (currently 3)
100012023-09-22T23:15:15.735ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
100022023-09-22T23:15:15.735ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
100032023-09-22T23:15:15.737ZINFOcrucible: Waiting for 4 jobs (currently 3)
100042023-09-22T23:15:15.738ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
100052023-09-22T23:15:15.863ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
100062023-09-22T23:15:15.863ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
100072023-09-22T23:15:15.863ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
100082023-09-22T23:15:15.863ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
100092023-09-22T23:15:15.863ZINFOcrucible: [1] e2a24436-8a58-49c3-b253-d7ce5f8c2715 (1d7b944a-0284-455a-8b0b-72a46a80d72a) Active LiveRepair Active ds_transition to Faulted
100102023-09-22T23:15:15.863ZINFOcrucible: [1] Transition from LiveRepair to Faulted
100112023-09-22T23:15:15.863ZINFOcrucible: Waiting for 4 jobs (currently 3)
100122023-09-22T23:15:15.863ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
100132023-09-22T23:15:15.863ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
10014 ----------------------------------------------------------------
10015 Crucible gen:0 GIO:true work queues: Upstairs:2 downstairs:4
10016 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10017 1 Acked 1000 FClose 0 Done Done Done false
10018 2 Acked 1001 NoOp 0 Done Done Done false
10019 3 NotAcked 1002 NoOp 0 New New New false
10020 4 NotAcked 1003 Reopen 0 New New New false
10021 STATES DS:0 DS:1 DS:2 TOTAL
10022 New 2 2 2 6
10023 Sent 0 0 0 0
10024 Done 2 2 2 6
10025 Skipped 0 0 0 0
10026 Error 0 0 0 0
10027 Last Flush: 0 0 0
10028 Downstairs last five completed:
10029 Upstairs last five completed: 2 1
100302023-09-22T23:15:16.598ZINFOcrucible: Now move the NoOp job forward
100312023-09-22T23:15:16.598ZINFOcrucible: Finally, move the ReOpen job forward
100322023-09-22T23:15:16.598ZINFOcrucible: Now ACK the reopen job
100332023-09-22T23:15:16.598ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
100342023-09-22T23:15:16.599ZINFOcrucible: jobs are: 4
10035 test live_repair::repair_test::test_repair_extent_no_action_all ... ok
100362023-09-22T23:15:16.730ZINFOcrucible: Now move the NoOp job forward
100372023-09-22T23:15:16.730ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
100382023-09-22T23:15:16.730ZERROcrucible: [0] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
100392023-09-22T23:15:16.730ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
100402023-09-22T23:15:16.730ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
100412023-09-22T23:15:16.730ZINFOcrucible: [0] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Active LiveRepair Active ds_transition to Faulted
100422023-09-22T23:15:16.730ZINFOcrucible: [0] Transition from Active to Faulted
100432023-09-22T23:15:16.731ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
100442023-09-22T23:15:16.731ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
100452023-09-22T23:15:16.731ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
100462023-09-22T23:15:16.731ZINFOcrucible: [1] f006ef8c-c009-4b2e-bcc5-fdf6b7dadb1b (ff123089-f6f2-4dc3-8c57-434e173b3d67) Faulted LiveRepair Active ds_transition to Faulted
100472023-09-22T23:15:16.731ZINFOcrucible: [1] Transition from LiveRepair to Faulted
100482023-09-22T23:15:16.731ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
100492023-09-22T23:15:16.735ZINFOcrucible: Now move the NoOp job forward
100502023-09-22T23:15:16.736ZINFOcrucible: Now ACK the NoOp job
100512023-09-22T23:15:16.736ZINFOcrucible: Finally, move the ReOpen job forward
100522023-09-22T23:15:16.736ZINFOcrucible: Now ACK the Reopen job
10053 {"msg":"Extent 0 close id:1002 Failed: Error: bad","v":0,"name":"crucible","level":50----------------------------------------------------------------
10054 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10055 ,"time":"GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10056 2023-09-22T23:15:16.736153514Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal" 1 Acked 1000 FClose 0," Done Donepid" Err: false4759
10057 }
10058 2 Acked 1001 NoOp 0 Done Skip Skip false
10059 { 3 Acked 1002 NoOp 0 Done Skip Skip false
10060 " 4 Acked 1003 Reopen 0msg" Done: Skip Skip" false
10061 STATES DS:0 DS:1 DS:2 TOTAL
10062 RE:0 Wait for result from reopen command 1003:4" New , 0 " 0 v": 0 0 0
10063 , Sent " 0 name" 0 : 0 " 0
10064 Done crucible" 4 1 , 0 " 5
10065 Skipped level" 0 : 3 30 3 6
10066 Error 0 0 1 1
10067 Last Flush: 0 0 0
10068 Downstairs last five completed:
10069 ,"time"Upstairs last five completed: :" 42023-09-22T23:15:16.736233281Z" 3 2 1
10070 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
100712023-09-22T23:15:16.736ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
100722023-09-22T23:15:16.736ZWARNcrucible: RE:0 Bailing with error
100732023-09-22T23:15:16.736ZINFOcrucible: Crucible stats registered with UUID: 8aebef52-94cd-4f20-8828-2b4a72214191
100742023-09-22T23:15:16.736ZINFOcrucible: Crucible 8aebef52-94cd-4f20-8828-2b4a72214191 has session id: 4dca0879-c48b-435e-bfa8-ff047a61d6cf
100752023-09-22T23:15:16.736ZINFOcrucible: [0] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) New New New ds_transition to WaitActive
100762023-09-22T23:15:16.736ZINFOcrucible: [0] Transition from New to WaitActive
100772023-09-22T23:15:16.736ZINFOcrucible: [0] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) WaitActive New New ds_transition to WaitQuorum
100782023-09-22T23:15:16.736ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
100792023-09-22T23:15:16.736ZINFOcrucible: [0] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) WaitQuorum New New ds_transition to Active
100802023-09-22T23:15:16.736ZINFOcrucible: [0] Transition from WaitQuorum to Active
100812023-09-22T23:15:16.736ZINFOcrucible: [1] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active New New ds_transition to WaitActive
100822023-09-22T23:15:16.736ZINFOcrucible: [1] Transition from New to WaitActive
100832023-09-22T23:15:16.737ZINFOcrucible: [1] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active WaitActive New ds_transition to WaitQuorum
100842023-09-22T23:15:16.737ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
100852023-09-22T23:15:16.737ZINFOcrucible: [1] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active WaitQuorum New ds_transition to Active
100862023-09-22T23:15:16.737ZINFOcrucible: [1] Transition from WaitQuorum to Active
100872023-09-22T23:15:16.737ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active New ds_transition to WaitActive
100882023-09-22T23:15:16.737ZINFOcrucible: [2] Transition from New to WaitActive
100892023-09-22T23:15:16.737ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active WaitActive ds_transition to WaitQuorum
100902023-09-22T23:15:16.737ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
100912023-09-22T23:15:16.737ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active WaitQuorum ds_transition to Active
100922023-09-22T23:15:16.737ZINFOcrucible: [2] Transition from WaitQuorum to Active
100932023-09-22T23:15:16.737ZINFOcrucible: 8aebef52-94cd-4f20-8828-2b4a72214191 is now active with session: 217e8b67-8753-4bb1-be70-961d7790fe1e
100942023-09-22T23:15:16.737ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active Active ds_transition to Faulted
100952023-09-22T23:15:16.737ZINFOcrucible: [2] Transition from Active to Faulted
100962023-09-22T23:15:16.737ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active Faulted ds_transition to LiveRepairReady
100972023-09-22T23:15:16.737ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
100982023-09-22T23:15:16.737ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active LiveRepairReady ds_transition to LiveRepair
100992023-09-22T23:15:16.737ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
101002023-09-22T23:15:16.737ZINFOcrucible: Waiting for Close + ReOpen jobs
101012023-09-22T23:15:16.737ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
101022023-09-22T23:15:16.737ZINFOcrucible: RE:0 close id:1000 queued, notify DS
101032023-09-22T23:15:16.737ZINFOcrucible: RE:0 Wait for result from close command 1000:1
101042023-09-22T23:15:16.739ZINFOcrucible: Now move the NoOp job forward
101052023-09-22T23:15:16.739ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101062023-09-22T23:15:16.739ZERROcrucible: [2] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101072023-09-22T23:15:16.739ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
101082023-09-22T23:15:16.739ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
101092023-09-22T23:15:16.739ZINFOcrucible: [2] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active LiveRepair Active ds_transition to Faulted
101102023-09-22T23:15:16.739ZINFOcrucible: [2] Transition from Active to Faulted
101112023-09-22T23:15:16.739ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
101122023-09-22T23:15:16.739ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
101132023-09-22T23:15:16.739ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
101142023-09-22T23:15:16.739ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
101152023-09-22T23:15:16.739ZINFOcrucible: [1] 56fedc7f-6649-4d9c-8d0a-ac22f65da705 (38051f6c-52ee-44a3-80ec-3f0d3c3a61b5) Active LiveRepair Faulted ds_transition to Faulted
101162023-09-22T23:15:16.739ZINFOcrucible: [1] Transition from LiveRepair to Faulted
101172023-09-22T23:15:16.739ZWARNcrucible: RE:0 Bailing with error
101182023-09-22T23:15:16.739ZINFOcrucible: Crucible stats registered with UUID: 889fe5fc-0632-4e64-8f28-45e2de5accae
101192023-09-22T23:15:16.740ZINFOcrucible: Crucible 889fe5fc-0632-4e64-8f28-45e2de5accae has session id: 61f44ac6-6799-447c-a841-782f5bb4b649
101202023-09-22T23:15:16.740ZINFOcrucible: [0] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) New New New ds_transition to WaitActive
101212023-09-22T23:15:16.740ZINFOcrucible: [0] Transition from New to WaitActive
101222023-09-22T23:15:16.740ZINFOcrucible: [0] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) WaitActive New New ds_transition to WaitQuorum
101232023-09-22T23:15:16.740ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
101242023-09-22T23:15:16.740ZINFOcrucible: [0] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) WaitQuorum New New ds_transition to Active
101252023-09-22T23:15:16.740ZINFOcrucible: [0] Transition from WaitQuorum to Active
101262023-09-22T23:15:16.740ZINFOcrucible: [1] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active New New ds_transition to WaitActive
101272023-09-22T23:15:16.740ZINFOcrucible: [1] Transition from New to WaitActive
101282023-09-22T23:15:16.740ZINFOcrucible: [1] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active WaitActive New ds_transition to WaitQuorum
101292023-09-22T23:15:16.740ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
101302023-09-22T23:15:16.740ZINFOcrucible: [1] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active WaitQuorum New ds_transition to Active
101312023-09-22T23:15:16.740ZINFOcrucible: [1] Transition from WaitQuorum to Active
101322023-09-22T23:15:16.740ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active New ds_transition to WaitActive
101332023-09-22T23:15:16.740ZINFOcrucible: [2] Transition from New to WaitActive
101342023-09-22T23:15:16.740ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active WaitActive ds_transition to WaitQuorum
101352023-09-22T23:15:16.740ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
101362023-09-22T23:15:16.740ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active WaitQuorum ds_transition to Active
101372023-09-22T23:15:16.740ZINFOcrucible: [2] Transition from WaitQuorum to Active
101382023-09-22T23:15:16.740ZINFOcrucible: 889fe5fc-0632-4e64-8f28-45e2de5accae is now active with session: 0cc985d8-8a6a-47f7-b1fd-cb9776e7451b
101392023-09-22T23:15:16.740ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active Active ds_transition to Faulted
101402023-09-22T23:15:16.740ZINFOcrucible: [2] Transition from Active to Faulted
101412023-09-22T23:15:16.740ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active Faulted ds_transition to LiveRepairReady
101422023-09-22T23:15:16.740ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
101432023-09-22T23:15:16.740ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active LiveRepairReady ds_transition to LiveRepair
101442023-09-22T23:15:16.740ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
101452023-09-22T23:15:16.740ZINFOcrucible: Waiting for Close + ReOpen jobs
101462023-09-22T23:15:16.740ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
101472023-09-22T23:15:16.740ZINFOcrucible: RE:0 close id:1000 queued, notify DS
101482023-09-22T23:15:16.740ZINFOcrucible: RE:0 Wait for result from close command 1000:1
101492023-09-22T23:15:16.864ZINFOcrucible: Now move the NoOp job forward
101502023-09-22T23:15:16.865ZINFOcrucible: Now ACK the NoOp job
101512023-09-22T23:15:16.865ZINFOcrucible: Finally, move the ReOpen job forward
101522023-09-22T23:15:16.865ZINFOcrucible: Now ACK the Reopen job
101532023-09-22T23:15:16.865ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
101542023-09-22T23:15:16.865ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
101552023-09-22T23:15:16.865ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
101562023-09-22T23:15:16.865ZWARNcrucible: RE:0 Bailing with error
101572023-09-22T23:15:16.865ZINFOcrucible: Crucible stats registered with UUID: 01994c6f-c2d5-493f-a8f0-5367919f7ab3
101582023-09-22T23:15:16.865ZINFOcrucible: Crucible 01994c6f-c2d5-493f-a8f0-5367919f7ab3 has session id: 547be00f-e88f-4516-bfaa-248097069cea
101592023-09-22T23:15:16.865ZINFOcrucible: [0] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) New New New ds_transition to WaitActive
101602023-09-22T23:15:16.865ZINFOcrucible: [0] Transition from New to WaitActive
101612023-09-22T23:15:16.865ZINFOcrucible: [0] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) WaitActive New New ds_transition to WaitQuorum
101622023-09-22T23:15:16.865ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
101632023-09-22T23:15:16.865ZINFOcrucible: [0] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) WaitQuorum New New ds_transition to Active
101642023-09-22T23:15:16.865ZINFOcrucible: [0] Transition from WaitQuorum to Active
101652023-09-22T23:15:16.865ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active New New ds_transition to WaitActive
101662023-09-22T23:15:16.865ZINFOcrucible: [1] Transition from New to WaitActive
101672023-09-22T23:15:16.865ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active WaitActive New ds_transition to WaitQuorum
101682023-09-22T23:15:16.865ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
101692023-09-22T23:15:16.865ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active WaitQuorum New ds_transition to Active
101702023-09-22T23:15:16.865ZINFOcrucible: [1] Transition from WaitQuorum to Active
101712023-09-22T23:15:16.865ZINFOcrucible: [2] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active Active New ds_transition to WaitActive
101722023-09-22T23:15:16.865ZINFOcrucible: [2] Transition from New to WaitActive
101732023-09-22T23:15:16.865ZINFOcrucible: [2] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active Active WaitActive ds_transition to WaitQuorum
101742023-09-22T23:15:16.866ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
101752023-09-22T23:15:16.866ZINFOcrucible: [2] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active Active WaitQuorum ds_transition to Active
101762023-09-22T23:15:16.866ZINFOcrucible: [2] Transition from WaitQuorum to Active
101772023-09-22T23:15:16.866ZINFOcrucible: 01994c6f-c2d5-493f-a8f0-5367919f7ab3 is now active with session: 7327196b-8480-4c3d-8d38-567327aabf7f
101782023-09-22T23:15:16.866ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active Active Active ds_transition to Faulted
101792023-09-22T23:15:16.866ZINFOcrucible: [1] Transition from Active to Faulted
101802023-09-22T23:15:16.866ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active Faulted Active ds_transition to LiveRepairReady
101812023-09-22T23:15:16.866ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
101822023-09-22T23:15:16.866ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active LiveRepairReady Active ds_transition to LiveRepair
101832023-09-22T23:15:16.866ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
101842023-09-22T23:15:16.866ZINFOcrucible: Waiting for Close + ReOpen jobs
101852023-09-22T23:15:16.866ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
101862023-09-22T23:15:16.866ZINFOcrucible: RE:0 close id:1000 queued, notify DS
101872023-09-22T23:15:16.866ZINFOcrucible: RE:0 Wait for result from close command 1000:1
101882023-09-22T23:15:17.738ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101892023-09-22T23:15:17.738ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101902023-09-22T23:15:17.738ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
101912023-09-22T23:15:17.738ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
101922023-09-22T23:15:17.738ZINFOcrucible: [0] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Active Active LiveRepair ds_transition to Faulted
101932023-09-22T23:15:17.738ZINFOcrucible: [0] Transition from Active to Faulted
101942023-09-22T23:15:17.738ZINFOcrucible: Now ACK the close job
101952023-09-22T23:15:17.738ZINFOcrucible: Waiting for 3 jobs (currently 2)
101962023-09-22T23:15:17.738ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
101972023-09-22T23:15:17.738ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
101982023-09-22T23:15:17.738ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
101992023-09-22T23:15:17.738ZINFOcrucible: [2] 8aebef52-94cd-4f20-8828-2b4a72214191 (217e8b67-8753-4bb1-be70-961d7790fe1e) Faulted Active LiveRepair ds_transition to Faulted
102002023-09-22T23:15:17.738ZINFOcrucible: [2] Transition from LiveRepair to Faulted
102012023-09-22T23:15:17.738ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
102022023-09-22T23:15:17.740ZINFOcrucible: Waiting for 3 jobs (currently 2)
102032023-09-22T23:15:17.740ZINFOcrucible: No repair needed for extent 0 = downstairs
102042023-09-22T23:15:17.740ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
102052023-09-22T23:15:17.867ZINFOcrucible: Now ACK the close job
102062023-09-22T23:15:17.867ZINFOcrucible: Waiting for 3 jobs (currently 2)
102072023-09-22T23:15:17.867ZINFOcrucible: No repair needed for extent 0 = downstairs
102082023-09-22T23:15:17.867ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
102092023-09-22T23:15:18.581ZINFOcrucible: responded to ping downstairs = 1
102102023-09-22T23:15:18.583ZINFOcrucible: responded to ping downstairs = 1
102112023-09-22T23:15:18.732ZINFOcrucible: Finally, move the ReOpen job forward
102122023-09-22T23:15:18.732ZINFOcrucible: Now ACK the reopen job
102132023-09-22T23:15:18.732ZWARNcrucible: RE:0 Bailing with error
102142023-09-22T23:15:18.732ZINFOcrucible: err:0 or:1
102152023-09-22T23:15:18.732ZINFOcrucible: Crucible stats registered with UUID: cb3ee61f-889b-4ca8-a683-5edf45aa3b9f
102162023-09-22T23:15:18.732ZINFOcrucible: Crucible cb3ee61f-889b-4ca8-a683-5edf45aa3b9f has session id: 92cb7af1-472f-4178-9c2c-4b85812af232
102172023-09-22T23:15:18.732ZINFOcrucible: [0] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) New New New ds_transition to WaitActive
102182023-09-22T23:15:18.732ZINFOcrucible: [0] Transition from New to WaitActive
102192023-09-22T23:15:18.732ZINFOcrucible: [0] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) WaitActive New New ds_transition to WaitQuorum
102202023-09-22T23:15:18.732ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
102212023-09-22T23:15:18.732ZINFOcrucible: [0] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) WaitQuorum New New ds_transition to Active
102222023-09-22T23:15:18.733ZINFOcrucible: [0] Transition from WaitQuorum to Active
102232023-09-22T23:15:18.733ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active New New ds_transition to WaitActive
102242023-09-22T23:15:18.733ZINFOcrucible: [1] Transition from New to WaitActive
102252023-09-22T23:15:18.733ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active WaitActive New ds_transition to WaitQuorum
102262023-09-22T23:15:18.733ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
102272023-09-22T23:15:18.733ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active WaitQuorum New ds_transition to Active
102282023-09-22T23:15:18.733ZINFOcrucible: [1] Transition from WaitQuorum to Active
102292023-09-22T23:15:18.733ZINFOcrucible: [2] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active Active New ds_transition to WaitActive
102302023-09-22T23:15:18.733ZINFOcrucible: [2] Transition from New to WaitActive
102312023-09-22T23:15:18.733ZINFOcrucible: [2] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active Active WaitActive ds_transition to WaitQuorum
102322023-09-22T23:15:18.733ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
102332023-09-22T23:15:18.733ZINFOcrucible: [2] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active Active WaitQuorum ds_transition to Active
102342023-09-22T23:15:18.733ZINFOcrucible: [2] Transition from WaitQuorum to Active
102352023-09-22T23:15:18.733ZINFOcrucible: cb3ee61f-889b-4ca8-a683-5edf45aa3b9f is now active with session: 15d39748-d69b-4979-93e2-ab7205c3ae88
102362023-09-22T23:15:18.733ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active Active Active ds_transition to Faulted
102372023-09-22T23:15:18.733ZINFOcrucible: [1] Transition from Active to Faulted
102382023-09-22T23:15:18.733ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active Faulted Active ds_transition to LiveRepairReady
102392023-09-22T23:15:18.733ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
102402023-09-22T23:15:18.733ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active LiveRepairReady Active ds_transition to LiveRepair
102412023-09-22T23:15:18.733ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
102422023-09-22T23:15:18.733ZINFOcrucible: Waiting for Close + ReOpen jobs
102432023-09-22T23:15:18.733ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
102442023-09-22T23:15:18.733ZINFOcrucible: RE:0 close id:1000 queued, notify DS
102452023-09-22T23:15:18.733ZINFOcrucible: RE:0 Wait for result from close command 1000:1
102462023-09-22T23:15:18.739ZINFOcrucible: Waiting for 4 jobs (currently 3)
102472023-09-22T23:15:18.739ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
102482023-09-22T23:15:18.739ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
102492023-09-22T23:15:18.741ZINFOcrucible: Waiting for 4 jobs (currently 3)
102502023-09-22T23:15:18.741ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
102512023-09-22T23:15:18.868ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
102522023-09-22T23:15:18.868ZERROcrucible: [2] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
102532023-09-22T23:15:18.868ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
102542023-09-22T23:15:18.868ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
102552023-09-22T23:15:18.868ZINFOcrucible: [2] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active LiveRepair Active ds_transition to Faulted
102562023-09-22T23:15:18.868ZINFOcrucible: [2] Transition from Active to Faulted
102572023-09-22T23:15:18.868ZINFOcrucible: Waiting for 4 jobs (currently 3)
102582023-09-22T23:15:18.868ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
102592023-09-22T23:15:18.868ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
102602023-09-22T23:15:18.868ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
102612023-09-22T23:15:18.868ZINFOcrucible: [1] 01994c6f-c2d5-493f-a8f0-5367919f7ab3 (7327196b-8480-4c3d-8d38-567327aabf7f) Active LiveRepair Faulted ds_transition to Faulted
102622023-09-22T23:15:18.869ZINFOcrucible: [1] Transition from LiveRepair to Faulted
102632023-09-22T23:15:18.869ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
102642023-09-22T23:15:19.733ZINFOcrucible: Waiting for 3 jobs (currently 2)
102652023-09-22T23:15:19.733ZINFOcrucible: No repair needed for extent 0 = downstairs
102662023-09-22T23:15:19.733ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
102672023-09-22T23:15:19.741ZINFOcrucible: Now move the NoOp job forward
102682023-09-22T23:15:19.741ZINFOcrucible: Now ACK the NoOp job
102692023-09-22T23:15:19.741ZINFOcrucible: Finally, move the ReOpen job forward
102702023-09-22T23:15:19.741ZINFOcrucible: Now ACK the Reopen job
102712023-09-22T23:15:19.741ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
102722023-09-22T23:15:19.741ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
102732023-09-22T23:15:19.741ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
102742023-09-22T23:15:19.741ZWARNcrucible: RE:0 Bailing with error
10275 ----------------------------------------------------------------
10276 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10277 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10278 1 Acked 1000 FClose 0 Err Done Done false
10279 2 Acked 1001 NoOp 0 Skip Done Skip false
10280 3 Acked 1002 NoOp 0 Skip Done Skip false
10281 4 Acked 1003 Reopen 0 Skip Done Skip false
10282 STATES DS:0 DS:1 DS:2 TOTAL
10283 New 0 0 0 0
10284 Sent 0 0 0 0
10285 Done 0 4 1 5
10286 Skipped 3 0 3 6
10287 Error 1 0 0 1
10288 Last Flush: 0 0 0
10289 Downstairs last five completed:
10290 Upstairs last five completed: 4 3 2 1
102912023-09-22T23:15:19.742ZINFOcrucible: Crucible stats registered with UUID: 8192eba5-97c6-48ab-8071-94d8c33e981b
102922023-09-22T23:15:19.742ZINFOcrucible: Crucible 8192eba5-97c6-48ab-8071-94d8c33e981b has session id: db40d87f-3f0f-4425-b8f0-ae0a1d863219
102932023-09-22T23:15:19.742ZINFOcrucible: [0] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) New New New ds_transition to WaitActive
102942023-09-22T23:15:19.742ZINFOcrucible: [0] Transition from New to WaitActive
102952023-09-22T23:15:19.742ZINFOcrucible: [0] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) WaitActive New New ds_transition to WaitQuorum
102962023-09-22T23:15:19.742ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
102972023-09-22T23:15:19.742ZINFOcrucible: [0] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) WaitQuorum New New ds_transition to Active
102982023-09-22T23:15:19.742ZINFOcrucible: [0] Transition from WaitQuorum to Active
102992023-09-22T23:15:19.742ZINFOcrucible: [1] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active New New ds_transition to WaitActive
103002023-09-22T23:15:19.742ZINFOcrucible: [1] Transition from New to WaitActive
103012023-09-22T23:15:19.742ZINFOcrucible: [1] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active WaitActive New ds_transition to WaitQuorum
103022023-09-22T23:15:19.742ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
103032023-09-22T23:15:19.742ZINFOcrucible: [1] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active WaitQuorum New ds_transition to Active
103042023-09-22T23:15:19.742ZINFOcrucible: [1] Transition from WaitQuorum to Active
103052023-09-22T23:15:19.742ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active New ds_transition to WaitActive
103062023-09-22T23:15:19.742ZINFOcrucible: [2] Transition from New to WaitActive
103072023-09-22T23:15:19.742ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active WaitActive ds_transition to WaitQuorum
103082023-09-22T23:15:19.742ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
103092023-09-22T23:15:19.742ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active WaitQuorum ds_transition to Active
103102023-09-22T23:15:19.742ZINFOcrucible: [2] Transition from WaitQuorum to Active
103112023-09-22T23:15:19.742ZINFOcrucible: 8192eba5-97c6-48ab-8071-94d8c33e981b is now active with session: 36b7cbe2-43d9-4803-b4a9-182785cca042
103122023-09-22T23:15:19.742ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active Active ds_transition to Faulted
103132023-09-22T23:15:19.742ZINFOcrucible: [2] Transition from Active to Faulted
103142023-09-22T23:15:19.742ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active Faulted ds_transition to LiveRepairReady
103152023-09-22T23:15:19.742ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
103162023-09-22T23:15:19.742ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active LiveRepairReady ds_transition to LiveRepair
103172023-09-22T23:15:19.742ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
103182023-09-22T23:15:19.742ZINFOcrucible: Waiting for Close + ReOpen jobs
103192023-09-22T23:15:19.742ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
103202023-09-22T23:15:19.742ZINFOcrucible: RE:0 close id:1000 queued, notify DS
103212023-09-22T23:15:19.742ZINFOcrucible: RE:0 Wait for result from close command 1000:1
103222023-09-22T23:15:19.743ZINFOcrucible: Now move the NoOp job forward
103232023-09-22T23:15:19.743ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
103242023-09-22T23:15:19.743ZERROcrucible: [0] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
103252023-09-22T23:15:19.743ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
103262023-09-22T23:15:19.743ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
103272023-09-22T23:15:19.743ZINFOcrucible: [0] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Active Active LiveRepair ds_transition to Faulted
103282023-09-22T23:15:19.743ZINFOcrucible: [0] Transition from Active to Faulted
103292023-09-22T23:15:19.743ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
103302023-09-22T23:15:19.743ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
103312023-09-22T23:15:19.743ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
103322023-09-22T23:15:19.743ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
103332023-09-22T23:15:19.743ZINFOcrucible: [2] 889fe5fc-0632-4e64-8f28-45e2de5accae (0cc985d8-8a6a-47f7-b1fd-cb9776e7451b) Faulted Active LiveRepair ds_transition to Faulted
103342023-09-22T23:15:19.743ZINFOcrucible: [2] Transition from LiveRepair to Faulted
103352023-09-22T23:15:19.743ZWARNcrucible: RE:0 Bailing with error
103362023-09-22T23:15:19.743ZINFOcrucible: Crucible stats registered with UUID: 1be45e4a-5a68-4e53-8c36-ebf527273e3d
103372023-09-22T23:15:19.743ZINFOcrucible: Crucible 1be45e4a-5a68-4e53-8c36-ebf527273e3d has session id: 34f73e1e-9954-45b5-9b2a-d076dda2cd1d
103382023-09-22T23:15:19.743ZINFOcrucible: [0] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) New New New ds_transition to WaitActive
103392023-09-22T23:15:19.744ZINFOcrucible: [0] Transition from New to WaitActive
103402023-09-22T23:15:19.744ZINFOcrucible: [0] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) WaitActive New New ds_transition to WaitQuorum
103412023-09-22T23:15:19.744ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
103422023-09-22T23:15:19.744ZINFOcrucible: [0] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) WaitQuorum New New ds_transition to Active
103432023-09-22T23:15:19.744ZINFOcrucible: [0] Transition from WaitQuorum to Active
103442023-09-22T23:15:19.744ZINFOcrucible: [1] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active New New ds_transition to WaitActive
103452023-09-22T23:15:19.744ZINFOcrucible: [1] Transition from New to WaitActive
103462023-09-22T23:15:19.744ZINFOcrucible: [1] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active WaitActive New ds_transition to WaitQuorum
103472023-09-22T23:15:19.744ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
103482023-09-22T23:15:19.744ZINFOcrucible: [1] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active WaitQuorum New ds_transition to Active
103492023-09-22T23:15:19.744ZINFOcrucible: [1] Transition from WaitQuorum to Active
103502023-09-22T23:15:19.744ZINFOcrucible: [2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active New ds_transition to WaitActive
103512023-09-22T23:15:19.744ZINFOcrucible: [2] Transition from New to WaitActive
103522023-09-22T23:15:19.744ZINFOcrucible: [2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active WaitActive ds_transition to WaitQuorum
103532023-09-22T23:15:19.744ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
103542023-09-22T23:15:19.744ZINFOcrucible: [2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active WaitQuorum ds_transition to Active
103552023-09-22T23:15:19.744ZINFOcrucible: [2] Transition from WaitQuorum to Active
103562023-09-22T23:15:19.744ZINFOcrucible: 1be45e4a-5a68-4e53-8c36-ebf527273e3d is now active with session: 1cc0e6a6-7f15-4d27-989e-892a7dc5f1be
103572023-09-22T23:15:19.744ZINFOcrucible: [2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active Active ds_transition to Faulted
103582023-09-22T23:15:19.744ZINFOcrucible: [2] Transition from Active to Faulted
103592023-09-22T23:15:19.744ZINFOcrucible: [2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active Faulted ds_transition to LiveRepairReady
103602023-09-22T23:15:19.744ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
103612023-09-22T23:15:19.744ZINFOcrucible: [2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active LiveRepairReady ds_transition to LiveRepair
103622023-09-22T23:15:19.744ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
103632023-09-22T23:15:19.744ZINFOcrucible: Waiting for Close + ReOpen jobs
103642023-09-22T23:15:19.744ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
103652023-09-22T23:15:19.744ZINFOcrucible: RE:0 close id:1000 queued, notify DS
103662023-09-22T23:15:19.744ZINFOcrucible: RE:0 Wait for result from close command 1000:1
103672023-09-22T23:15:19.870ZINFOcrucible: Now move the NoOp job forward
103682023-09-22T23:15:19.870ZINFOcrucible: Now ACK the NoOp job
103692023-09-22T23:15:19.870ZINFOcrucible: Finally, move the ReOpen job forward
103702023-09-22T23:15:19.870ZINFOcrucible: Now ACK the Reopen job
103712023-09-22T23:15:19.870ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
103722023-09-22T23:15:19.870ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
103732023-09-22T23:15:19.870ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
103742023-09-22T23:15:19.870ZWARNcrucible: RE:0 Bailing with error
103752023-09-22T23:15:19.870ZINFOcrucible: Crucible stats registered with UUID: cf578269-121b-47cb-9294-332f82a54c8d
103762023-09-22T23:15:19.870ZINFOcrucible: Crucible cf578269-121b-47cb-9294-332f82a54c8d has session id: e7759d74-a4ba-467d-a028-464f8d9cc295
103772023-09-22T23:15:19.870ZINFOcrucible: [0] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) New New New ds_transition to WaitActive
103782023-09-22T23:15:19.870ZINFOcrucible: [0] Transition from New to WaitActive
103792023-09-22T23:15:19.870ZINFOcrucible: [0] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) WaitActive New New ds_transition to WaitQuorum
103802023-09-22T23:15:19.870ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
103812023-09-22T23:15:19.870ZINFOcrucible: [0] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) WaitQuorum New New ds_transition to Active
103822023-09-22T23:15:19.870ZINFOcrucible: [0] Transition from WaitQuorum to Active
103832023-09-22T23:15:19.871ZINFOcrucible: [1] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active New New ds_transition to WaitActive
103842023-09-22T23:15:19.871ZINFOcrucible: [1] Transition from New to WaitActive
103852023-09-22T23:15:19.871ZINFOcrucible: [1] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active WaitActive New ds_transition to WaitQuorum
103862023-09-22T23:15:19.871ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
103872023-09-22T23:15:19.871ZINFOcrucible: [1] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active WaitQuorum New ds_transition to Active
103882023-09-22T23:15:19.871ZINFOcrucible: [1] Transition from WaitQuorum to Active
103892023-09-22T23:15:19.871ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active New ds_transition to WaitActive
103902023-09-22T23:15:19.871ZINFOcrucible: [2] Transition from New to WaitActive
103912023-09-22T23:15:19.871ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active WaitActive ds_transition to WaitQuorum
103922023-09-22T23:15:19.871ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
103932023-09-22T23:15:19.871ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active WaitQuorum ds_transition to Active
103942023-09-22T23:15:19.871ZINFOcrucible: [2] Transition from WaitQuorum to Active
103952023-09-22T23:15:19.871ZINFOcrucible: cf578269-121b-47cb-9294-332f82a54c8d is now active with session: b7ea8e7a-8c29-4b17-900a-18bdd32b26b0
103962023-09-22T23:15:19.871ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active Active ds_transition to Faulted
103972023-09-22T23:15:19.871ZINFOcrucible: [2] Transition from Active to Faulted
103982023-09-22T23:15:19.871ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active Faulted ds_transition to LiveRepairReady
103992023-09-22T23:15:19.871ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
104002023-09-22T23:15:19.871ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active LiveRepairReady ds_transition to LiveRepair
104012023-09-22T23:15:19.871ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
104022023-09-22T23:15:19.871ZINFOcrucible: Waiting for Close + ReOpen jobs
104032023-09-22T23:15:19.871ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
104042023-09-22T23:15:19.871ZINFOcrucible: RE:0 close id:1000 queued, notify DS
104052023-09-22T23:15:19.871ZINFOcrucible: RE:0 Wait for result from close command 1000:1
104062023-09-22T23:15:20.734ZINFOcrucible: Waiting for 4 jobs (currently 3)
104072023-09-22T23:15:20.734ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
104082023-09-22T23:15:20.743ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104092023-09-22T23:15:20.743ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104102023-09-22T23:15:20.743ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
104112023-09-22T23:15:20.743ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
104122023-09-22T23:15:20.743ZINFOcrucible: [1] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Active LiveRepair ds_transition to Faulted
104132023-09-22T23:15:20.743ZINFOcrucible: [1] Transition from Active to Faulted
104142023-09-22T23:15:20.743ZINFOcrucible: Now ACK the close job
104152023-09-22T23:15:20.743ZINFOcrucible: Waiting for 3 jobs (currently 2)
104162023-09-22T23:15:20.743ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
104172023-09-22T23:15:20.743ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
104182023-09-22T23:15:20.743ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
104192023-09-22T23:15:20.743ZINFOcrucible: [2] 8192eba5-97c6-48ab-8071-94d8c33e981b (36b7cbe2-43d9-4803-b4a9-182785cca042) Active Faulted LiveRepair ds_transition to Faulted
104202023-09-22T23:15:20.743ZINFOcrucible: [2] Transition from LiveRepair to Faulted
104212023-09-22T23:15:20.743ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
104222023-09-22T23:15:20.744ZINFOcrucible: Waiting for 3 jobs (currently 2)
104232023-09-22T23:15:20.744ZINFOcrucible: No repair needed for extent 0 = downstairs
104242023-09-22T23:15:20.744ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
104252023-09-22T23:15:20.871ZINFOcrucible: Now ACK the close job
104262023-09-22T23:15:20.871ZINFOcrucible: Waiting for 3 jobs (currently 2)
104272023-09-22T23:15:20.871ZINFOcrucible: No repair needed for extent 0 = downstairs
104282023-09-22T23:15:20.871ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
104292023-09-22T23:15:21.735ZINFOcrucible: Now move the NoOp job forward
104302023-09-22T23:15:21.735ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104312023-09-22T23:15:21.735ZERROcrucible: [1] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104322023-09-22T23:15:21.735ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
104332023-09-22T23:15:21.735ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
104342023-09-22T23:15:21.735ZINFOcrucible: [1] cb3ee61f-889b-4ca8-a683-5edf45aa3b9f (15d39748-d69b-4979-93e2-ab7205c3ae88) Active LiveRepair Active ds_transition to Faulted
104352023-09-22T23:15:21.735ZINFOcrucible: [1] Transition from LiveRepair to Faulted
104362023-09-22T23:15:21.735ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
104372023-09-22T23:15:21.735ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
104382023-09-22T23:15:21.745ZINFOcrucible: Waiting for 4 jobs (currently 3)
104392023-09-22T23:15:21.745ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
104402023-09-22T23:15:21.745ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
104412023-09-22T23:15:21.745ZINFOcrucible: Waiting for 4 jobs (currently 3)
104422023-09-22T23:15:21.745ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
104432023-09-22T23:15:21.872ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104442023-09-22T23:15:21.872ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104452023-09-22T23:15:21.872ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
104462023-09-22T23:15:21.872ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
104472023-09-22T23:15:21.872ZINFOcrucible: [0] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Active Active LiveRepair ds_transition to Faulted
104482023-09-22T23:15:21.872ZINFOcrucible: [0] Transition from Active to Faulted
104492023-09-22T23:15:21.873ZINFOcrucible: Waiting for 4 jobs (currently 3)
104502023-09-22T23:15:21.873ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
104512023-09-22T23:15:21.873ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
104522023-09-22T23:15:21.873ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
104532023-09-22T23:15:21.873ZINFOcrucible: [2] cf578269-121b-47cb-9294-332f82a54c8d (b7ea8e7a-8c29-4b17-900a-18bdd32b26b0) Faulted Active LiveRepair ds_transition to Faulted
104542023-09-22T23:15:21.873ZINFOcrucible: [2] Transition from LiveRepair to Faulted
104552023-09-22T23:15:21.873ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
104562023-09-22T23:15:22.746ZINFOcrucible: Now move the NoOp job forward
104572023-09-22T23:15:22.746ZINFOcrucible: Now ACK the NoOp job
104582023-09-22T23:15:22.746ZINFOcrucible: Finally, move the ReOpen job forward
104592023-09-22T23:15:22.746ZINFOcrucible: Now ACK the Reopen job
104602023-09-22T23:15:22.746ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
104612023-09-22T23:15:22.746ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
104622023-09-22T23:15:22.746ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
104632023-09-22T23:15:22.746ZWARNcrucible: RE:0 Bailing with error
10464 ----------------------------------------------------------------
10465 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10466 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10467 1 Acked 1000 FClose 0 Done Err Done false
10468 2 Acked 1001 NoOp 0 Done Skip Skip false
10469 3 Acked 1002 NoOp 0 Done Skip Skip false
10470 4 Acked 1003 Reopen 0 Done Skip Skip false
10471 STATES DS:0 DS:1 DS:2 TOTAL
10472 New 0 0 0 0
10473 Sent 0 0 0 0
10474 Done 4 0 1 5
10475 Skipped 0 3 3 6
10476 Error 0 1 0 1
10477 Last Flush: 0 0 0
10478 Downstairs last five completed:
10479 Upstairs last five completed: 4 3 2 1
104802023-09-22T23:15:22.747ZINFOcrucible: Now move the NoOp job forward
10481 {"msg":"[1] DS Reports error Err(GenericError(\"bad\"{)) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"","msg"v":":0,"name":"crucible"Crucible stats registered with UUID: 0c15e6ed-424b-4937-a4ba-572efa006555,""level":50,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747239949Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":","downstairs"time":"}
10482 2023-09-22T23:15:22.747246131Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4759msg":"}
10483 {"[1] Reports error GenericError(msg"\":"bad\"Crucible 0c15e6ed-424b-4937-a4ba-572efa006555 has session id: 1894bd35-ed67-4d23-a2e0-dfa3df762dc8","v":0,"name":"crucible","level":30) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:15:22.747325216Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10484 ,"time":"{2023-09-22T23:15:22.747339485Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"":"downstairs"[0] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) New New New ds_transition to WaitActive"},"
10485 v":0,"name":"crucible"{,"level":30"msg":"[1] client skip 4 in process jobs because fault","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747379555Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"time":"}
10486 2023-09-22T23:15:22.747389657Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"":msg4759":","":"downstairs"[0] Transition from New to WaitActive"}
10487 ,"v":0,"name":"{crucible","level":"30msg":"[1] changed 0 jobs to fault skipped","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747431858Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10488 ,"time":"{2023-09-22T23:15:22.747439911Z",""hostnamemsg""::""ip-10-150-1-55.us-west-2.compute.internal","pid":4759,[0] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) WaitActive New New ds_transition to WaitQuorum""":","downstairs"v":}0
10489 ,"name":"crucible","level":{30"msg":"[1] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Active LiveRepair ds_transition to Faulted","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:15:22.747482859Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10490 ,"{time":"2023-09-22T23:15:22.747497786Z""msg":","hostname":"[0] Transition from WaitActive to WaitQuorum","v"ip-10-150-1-55.us-west-2.compute.internal":,"0pid":,4759"name":"}crucible"
10491 ,"level":30{"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747534802Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10492 ,"time":"{2023-09-22T23:15:22.747547205Z"",msg":""hostname":"ip-10-150-1-55.us-west-2.compute.internal[0] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) WaitQuorum New New ds_transition to Active"",",v":"0pid",":name":"4759crucible","level":}30
10493 {,"time":"2023-09-22T23:15:22.74758087Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10494 RE:0 Wait for result from reopen command 1003:4"{,"v""msg":":0[0] Transition from WaitQuorum to Active",","name":v":"0,crucible""name,":""crucible"level":,"30level":30,"time":"2023-09-22T23:15:22.747614884Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10495 {,"time":""msg":"2023-09-22T23:15:22.747614634Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",[1] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active New New ds_transition to WaitActive"",pid":"4759v":0,"}name":"
10496 crucible","level":30{"msg":"Extent 0 close id:1003 Failed: Error: bad","v":0,"name":"crucible",",time":""level":502023-09-22T23:15:22.74764478Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10497 {,"time":""msg":"2023-09-22T23:15:22.747654796Z","[1] Transition from New to WaitActivehostname":"","v":0ip-10-150-1-55.us-west-2.compute.internal",",name":""crucible"pid":,"4759level"}:30
10498 {"msg":"[2] client skip 4 in process jobs because fault","v":0,","timename":"":"crucible","level2023-09-22T23:15:22.747676752Z"":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10499 {,"time":""msg":"2023-09-22T23:15:22.747691485Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759[1] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active WaitActive New ds_transition to WaitQuorum",,""v":0",":name":""crucible"downstairs,""level":}30
10500 ,"time":"{2023-09-22T23:15:22.747713976Z",""hostnamemsg":"":"[2] changed 0 jobs to fault skipped","ip-10-150-1-55.us-west-2.compute.internal"v":,"0pid":,"4759name":}"
10501 crucible","{level":30"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747741276Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","time":"pid":47592023-09-22T23:15:22.747746116Z,""",":"hostname":downstairs""}
10502 ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10503 {{"msg":""msg":"[2] 1be45e4a-5a68-4e53-8c36-ebf527273e3d (1cc0e6a6-7f15-4d27-989e-892a7dc5f1be) Active Faulted LiveRepair ds_transition to Faulted"[1] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active WaitQuorum New ds_transition to Active",,""v":0v":0,","name":"name":"crucible"crucible",","levellevel":"30:30,"time":","time2023-09-22T23:15:22.747789594Z"",":"hostname":"2023-09-22T23:15:22.747790049Z","ip-10-150-1-55.us-west-2.compute.internal","hostname":"pid":4759}
10504 ip-10-150-1-55.us-west-2.compute.internal","pid{":4759"}msg":"
10505 [1] Transition from WaitQuorum to Active","v{":0,""msgname"":":"crucible","level":30[2] Transition from LiveRepair to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747820088Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal",":"pid":47592023-09-22T23:15:22.747823299Z}
10506 ","hostname":"{"ip-10-150-1-55.us-west-2.compute.internal"msg":","pid":4759}
10507 [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active New ds_transition to WaitActive","v":0{,"name":""crucible"msg,"":"level":30RE:0 Bailing with error","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:15:22.747852798Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid2023-09-22T23:15:22.747855979Z"",":4759hostname":}"
10508 ip-10-150-1-55.us-west-2.compute.internal","pid":{4759}
10509 "msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.747879635Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
105102023-09-22T23:15:22.747ZINFOcrucible: [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active WaitActive ds_transition to WaitQuorum
105112023-09-22T23:15:22.747ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
105122023-09-22T23:15:22.747ZINFOcrucible: [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active WaitQuorum ds_transition to Active
105132023-09-22T23:15:22.747ZINFOcrucible: [2] Transition from WaitQuorum to Active
105142023-09-22T23:15:22.748ZINFOcrucible: 0c15e6ed-424b-4937-a4ba-572efa006555 is now active with session: f68cb12b-8c72-4782-bbfd-626dd395c9fb
105152023-09-22T23:15:22.748ZINFOcrucible: [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active Active ds_transition to Faulted
105162023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from Active to Faulted
105172023-09-22T23:15:22.748ZINFOcrucible: [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active Faulted ds_transition to LiveRepairReady
105182023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
105192023-09-22T23:15:22.748ZINFOcrucible: [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active LiveRepairReady ds_transition to LiveRepair
10520 {"msg":"[2] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"time":"{2023-09-22T23:15:22.748244004Z","hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":","pid":4759Crucible stats registered with UUID: ac3b0226-dead-4e0a-a77b-1f009b390e9a"}
10521 ,"v":0,"name":"{crucible","level":30"msg":"Waiting for Close + ReOpen jobs","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.748276424Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4759"}time"
10522 :"2023-09-22T23:15:22.748284167Z{",""hostname":msg":""ip-10-150-1-55.us-west-2.compute.internal","Crucible ac3b0226-dead-4e0a-a77b-1f009b390e9a has session id: 80f29230-7c3e-4c9e-8ba7-584487f3d649"pid",":v4759":0,"}name":
10523 "crucible","level":30{"msg":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]",,""vtime":":"0,"2023-09-22T23:15:22.748325954Z"name",":hostname":""crucible"ip-10-150-1-55.us-west-2.compute.internal",,""pid":level4759":}30
10524 {"msg":"[0] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.748348948Z","hostname":","time":"2023-09-22T23:15:22.748362814Z"ip-10-150-1-55.us-west-2.compute.internal",",hostname":""pid":ip-10-150-1-55.us-west-2.compute.internal4759","pid":}4759
10525 }
10526 {{""msgmsg":"":"[0] Transition from New to WaitActive","v":0,"RE:0 close id:1000 queued, notify DSname":""crucible",,""level":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:15:22.748399187Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10527 {"msg":","[0] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) WaitActive New New ds_transition to WaitQuorum"time,""v":0:,""name":"crucible","2023-09-22T23:15:22.748404452Zlevel":"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"}time":
10528 "2023-09-22T23:15:22.748426203Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4759"}msg"
10529 :"{"msg":"RE:0 Wait for result from close command 1000:1","[0] Transition from WaitActive to WaitQuorum"v",:"0v",:"0,name"":name":""crucible","crucible"level":,30"level":30,"time":"2023-09-22T23:15:22.748464937Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10530 {"msg":","time":"[0] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) WaitQuorum New New ds_transition to Active","v":0,"2023-09-22T23:15:22.748466785Z"name":","crucible","hostname":level":"30ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
10531 ,"time":"2023-09-22T23:15:22.748491608Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759}
105322023-09-22T23:15:22.748ZINFOcrucible: [0] Transition from WaitQuorum to Active
105332023-09-22T23:15:22.748ZINFOcrucible: [1] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active New New ds_transition to WaitActive
105342023-09-22T23:15:22.748ZINFOcrucible: [1] Transition from New to WaitActive
105352023-09-22T23:15:22.748ZINFOcrucible: [1] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active WaitActive New ds_transition to WaitQuorum
105362023-09-22T23:15:22.748ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
105372023-09-22T23:15:22.748ZINFOcrucible: [1] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active WaitQuorum New ds_transition to Active
105382023-09-22T23:15:22.748ZINFOcrucible: [1] Transition from WaitQuorum to Active
105392023-09-22T23:15:22.748ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active New ds_transition to WaitActive
105402023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from New to WaitActive
105412023-09-22T23:15:22.748ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active WaitActive ds_transition to WaitQuorum
105422023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
105432023-09-22T23:15:22.748ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active WaitQuorum ds_transition to Active
105442023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from WaitQuorum to Active
105452023-09-22T23:15:22.748ZINFOcrucible: ac3b0226-dead-4e0a-a77b-1f009b390e9a is now active with session: 733e1a80-c7f6-4e41-9e2f-675bfc296009
105462023-09-22T23:15:22.748ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active Active ds_transition to Faulted
105472023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from Active to Faulted
105482023-09-22T23:15:22.748ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active Faulted ds_transition to LiveRepairReady
105492023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
105502023-09-22T23:15:22.748ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active LiveRepairReady ds_transition to LiveRepair
105512023-09-22T23:15:22.748ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
105522023-09-22T23:15:22.748ZINFOcrucible: Waiting for Close + ReOpen jobs
105532023-09-22T23:15:22.748ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
105542023-09-22T23:15:22.748ZINFOcrucible: RE:0 close id:1000 queued, notify DS
105552023-09-22T23:15:22.748ZINFOcrucible: RE:0 Wait for result from close command 1000:1
105562023-09-22T23:15:22.873ZINFOcrucible: Now move the NoOp job forward
105572023-09-22T23:15:22.873ZINFOcrucible: Now ACK the NoOp job
105582023-09-22T23:15:22.873ZINFOcrucible: Finally, move the ReOpen job forward
105592023-09-22T23:15:22.873ZINFOcrucible: Now ACK the Reopen job
105602023-09-22T23:15:22.873ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
105612023-09-22T23:15:22.873ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
105622023-09-22T23:15:22.873ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
105632023-09-22T23:15:22.873ZWARNcrucible: RE:0 Bailing with error
105642023-09-22T23:15:22.873ZINFOcrucible: Crucible stats registered with UUID: 41428a10-9420-4a67-b738-1e9612812de2
105652023-09-22T23:15:22.873ZINFOcrucible: Crucible 41428a10-9420-4a67-b738-1e9612812de2 has session id: 75e0b9c2-4a79-4bee-bb52-2513fd0a37bd
105662023-09-22T23:15:22.873ZINFOcrucible: [0] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) New New New ds_transition to WaitActive
105672023-09-22T23:15:22.873ZINFOcrucible: [0] Transition from New to WaitActive
105682023-09-22T23:15:22.873ZINFOcrucible: [0] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) WaitActive New New ds_transition to WaitQuorum
105692023-09-22T23:15:22.873ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
105702023-09-22T23:15:22.874ZINFOcrucible: [0] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) WaitQuorum New New ds_transition to Active
105712023-09-22T23:15:22.874ZINFOcrucible: [0] Transition from WaitQuorum to Active
105722023-09-22T23:15:22.874ZINFOcrucible: [1] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active New New ds_transition to WaitActive
105732023-09-22T23:15:22.874ZINFOcrucible: [1] Transition from New to WaitActive
105742023-09-22T23:15:22.874ZINFOcrucible: [1] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active WaitActive New ds_transition to WaitQuorum
105752023-09-22T23:15:22.874ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
105762023-09-22T23:15:22.874ZINFOcrucible: [1] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active WaitQuorum New ds_transition to Active
105772023-09-22T23:15:22.874ZINFOcrucible: [1] Transition from WaitQuorum to Active
105782023-09-22T23:15:22.874ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active New ds_transition to WaitActive
105792023-09-22T23:15:22.874ZINFOcrucible: [2] Transition from New to WaitActive
105802023-09-22T23:15:22.874ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active WaitActive ds_transition to WaitQuorum
105812023-09-22T23:15:22.874ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
105822023-09-22T23:15:22.874ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active WaitQuorum ds_transition to Active
105832023-09-22T23:15:22.874ZINFOcrucible: [2] Transition from WaitQuorum to Active
105842023-09-22T23:15:22.874ZINFOcrucible: 41428a10-9420-4a67-b738-1e9612812de2 is now active with session: bb21993f-9652-4c5a-822b-f68bed4ca967
105852023-09-22T23:15:22.874ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active Active ds_transition to Faulted
105862023-09-22T23:15:22.874ZINFOcrucible: [2] Transition from Active to Faulted
105872023-09-22T23:15:22.874ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active Faulted ds_transition to LiveRepairReady
105882023-09-22T23:15:22.874ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
105892023-09-22T23:15:22.874ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active LiveRepairReady ds_transition to LiveRepair
105902023-09-22T23:15:22.874ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
105912023-09-22T23:15:22.874ZINFOcrucible: Waiting for Close + ReOpen jobs
105922023-09-22T23:15:22.874ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
105932023-09-22T23:15:22.874ZINFOcrucible: RE:0 close id:1000 queued, notify DS
105942023-09-22T23:15:22.874ZINFOcrucible: RE:0 Wait for result from close command 1000:1
105952023-09-22T23:15:23.736ZINFOcrucible: Finally, move the ReOpen job forward
105962023-09-22T23:15:23.736ZINFOcrucible: Now ACK the reopen job
105972023-09-22T23:15:23.736ZWARNcrucible: RE:0 Bailing with error
105982023-09-22T23:15:23.737ZINFOcrucible: Crucible stats registered with UUID: 9a789d93-ee2b-4070-ad87-88d89554e7ff
105992023-09-22T23:15:23.737ZINFOcrucible: Crucible 9a789d93-ee2b-4070-ad87-88d89554e7ff has session id: 1d38b817-30d0-4b12-9e45-a0a733ff22aa
106002023-09-22T23:15:23.737ZINFOcrucible: [0] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) New New New ds_transition to WaitActive
106012023-09-22T23:15:23.737ZINFOcrucible: [0] Transition from New to WaitActive
106022023-09-22T23:15:23.737ZINFOcrucible: [0] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) WaitActive New New ds_transition to WaitQuorum
106032023-09-22T23:15:23.737ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
106042023-09-22T23:15:23.737ZINFOcrucible: [0] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) WaitQuorum New New ds_transition to Active
106052023-09-22T23:15:23.737ZINFOcrucible: [0] Transition from WaitQuorum to Active
106062023-09-22T23:15:23.737ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active New New ds_transition to WaitActive
106072023-09-22T23:15:23.737ZINFOcrucible: [1] Transition from New to WaitActive
106082023-09-22T23:15:23.737ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active WaitActive New ds_transition to WaitQuorum
106092023-09-22T23:15:23.737ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
106102023-09-22T23:15:23.737ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active WaitQuorum New ds_transition to Active
106112023-09-22T23:15:23.737ZINFOcrucible: [1] Transition from WaitQuorum to Active
106122023-09-22T23:15:23.737ZINFOcrucible: [2] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active Active New ds_transition to WaitActive
106132023-09-22T23:15:23.737ZINFOcrucible: [2] Transition from New to WaitActive
106142023-09-22T23:15:23.737ZINFOcrucible: [2] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active Active WaitActive ds_transition to WaitQuorum
106152023-09-22T23:15:23.737ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
106162023-09-22T23:15:23.737ZINFOcrucible: [2] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active Active WaitQuorum ds_transition to Active
106172023-09-22T23:15:23.737ZINFOcrucible: [2] Transition from WaitQuorum to Active
106182023-09-22T23:15:23.737ZINFOcrucible: 9a789d93-ee2b-4070-ad87-88d89554e7ff is now active with session: 059a98cb-d0c4-4eed-b294-64d16509a464
106192023-09-22T23:15:23.737ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active Active Active ds_transition to Faulted
106202023-09-22T23:15:23.737ZINFOcrucible: [1] Transition from Active to Faulted
106212023-09-22T23:15:23.737ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active Faulted Active ds_transition to LiveRepairReady
106222023-09-22T23:15:23.737ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
106232023-09-22T23:15:23.737ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active LiveRepairReady Active ds_transition to LiveRepair
106242023-09-22T23:15:23.737ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
106252023-09-22T23:15:23.737ZINFOcrucible: Waiting for Close + ReOpen jobs
106262023-09-22T23:15:23.737ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
106272023-09-22T23:15:23.737ZINFOcrucible: RE:0 close id:1000 queued, notify DS
106282023-09-22T23:15:23.738ZINFOcrucible: RE:0 Wait for result from close command 1000:1
106292023-09-22T23:15:23.748ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106302023-09-22T23:15:23.748ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106312023-09-22T23:15:23.748ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
106322023-09-22T23:15:23.748ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
106332023-09-22T23:15:23.748ZINFOcrucible: [2] 0c15e6ed-424b-4937-a4ba-572efa006555 (f68cb12b-8c72-4782-bbfd-626dd395c9fb) Active Active LiveRepair ds_transition to Faulted
106342023-09-22T23:15:23.748ZINFOcrucible: [2] Transition from LiveRepair to Faulted
106352023-09-22T23:15:23.748ZINFOcrucible: Now ACK the close job
106362023-09-22T23:15:23.748ZINFOcrucible: Waiting for 3 jobs (currently 2)
106372023-09-22T23:15:23.748ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
106382023-09-22T23:15:23.748ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
106392023-09-22T23:15:23.750ZINFOcrucible: Waiting for 3 jobs (currently 2)
106402023-09-22T23:15:23.750ZINFOcrucible: No repair needed for extent 0 = downstairs
106412023-09-22T23:15:23.750ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
106422023-09-22T23:15:23.874ZINFOcrucible: Now ACK the close job
106432023-09-22T23:15:23.874ZINFOcrucible: Waiting for 3 jobs (currently 2)
106442023-09-22T23:15:23.874ZINFOcrucible: No repair needed for extent 0 = downstairs
106452023-09-22T23:15:23.874ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
106462023-09-22T23:15:24.738ZINFOcrucible: Waiting for 3 jobs (currently 2)
106472023-09-22T23:15:24.739ZINFOcrucible: No repair needed for extent 0 = downstairs
106482023-09-22T23:15:24.739ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
106492023-09-22T23:15:24.749ZINFOcrucible: Waiting for 4 jobs (currently 3)
106502023-09-22T23:15:24.749ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
106512023-09-22T23:15:24.749ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
106522023-09-22T23:15:24.750ZINFOcrucible: Waiting for 4 jobs (currently 3)
106532023-09-22T23:15:24.750ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
106542023-09-22T23:15:24.875ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106552023-09-22T23:15:24.875ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106562023-09-22T23:15:24.875ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
106572023-09-22T23:15:24.875ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
106582023-09-22T23:15:24.875ZINFOcrucible: [1] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Active LiveRepair ds_transition to Faulted
106592023-09-22T23:15:24.875ZINFOcrucible: [1] Transition from Active to Faulted
106602023-09-22T23:15:24.875ZINFOcrucible: Waiting for 4 jobs (currently 3)
106612023-09-22T23:15:24.876ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
106622023-09-22T23:15:24.876ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
106632023-09-22T23:15:24.876ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
106642023-09-22T23:15:24.876ZINFOcrucible: [2] 41428a10-9420-4a67-b738-1e9612812de2 (bb21993f-9652-4c5a-822b-f68bed4ca967) Active Faulted LiveRepair ds_transition to Faulted
106652023-09-22T23:15:24.876ZINFOcrucible: [2] Transition from LiveRepair to Faulted
106662023-09-22T23:15:24.876ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
106672023-09-22T23:15:25.739ZINFOcrucible: Waiting for 4 jobs (currently 3)
106682023-09-22T23:15:25.739ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
106692023-09-22T23:15:25.749ZINFOcrucible: Now move the NoOp job forward
106702023-09-22T23:15:25.749ZINFOcrucible: Now ACK the NoOp job
106712023-09-22T23:15:25.749ZINFOcrucible: Finally, move the ReOpen job forward
106722023-09-22T23:15:25.749ZINFOcrucible: Now ACK the Reopen job
106732023-09-22T23:15:25.749ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
106742023-09-22T23:15:25.749ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
106752023-09-22T23:15:25.749ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
106762023-09-22T23:15:25.749ZWARNcrucible: RE:0 Bailing with error
10677 ----------------------------------------------------------------
10678 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10679 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10680 1 Acked 1000 FClose 0 Done Done Err false
10681 2 Acked 1001 NoOp 0 Done Done Skip false
10682 3 Acked 1002 NoOp 0 Done Done Skip false
10683 4 Acked 1003 Reopen 0 Done Done Skip false
10684 STATES DS:0 DS:1 DS:2 TOTAL
10685 New 0 0 0 0
10686 Sent 0 0 0 0
10687 Done 4 4 0 8
10688 Skipped 0 0 3 3
10689 Error 0 0 1 1
10690 Last Flush: 0 0 0
10691 Downstairs last five completed:
10692 Upstairs last five completed: 4 3 2 1
10693 test live_repair::repair_test::test_repair_extent_close_fails_all ... ok
106942023-09-22T23:15:25.751ZINFOcrucible: Now move the NoOp job forward
106952023-09-22T23:15:25.751ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106962023-09-22T23:15:25.751ZERROcrucible: [2] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106972023-09-22T23:15:25.751ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
106982023-09-22T23:15:25.751ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
106992023-09-22T23:15:25.751ZINFOcrucible: [2] ac3b0226-dead-4e0a-a77b-1f009b390e9a (733e1a80-c7f6-4e41-9e2f-675bfc296009) Active Active LiveRepair ds_transition to Faulted
107002023-09-22T23:15:25.751ZINFOcrucible: [2] Transition from LiveRepair to Faulted
107012023-09-22T23:15:25.751ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
107022023-09-22T23:15:25.751ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
107032023-09-22T23:15:25.751ZWARNcrucible: RE:0 Bailing with error
10704 test live_repair::repair_test::test_repair_extent_fail_reopen_all ... ok
107052023-09-22T23:15:25.876ZINFOcrucible: Now move the NoOp job forward
107062023-09-22T23:15:25.876ZINFOcrucible: Now ACK the NoOp job
107072023-09-22T23:15:25.876ZINFOcrucible: Finally, move the ReOpen job forward
107082023-09-22T23:15:25.876ZINFOcrucible: Now ACK the Reopen job
107092023-09-22T23:15:25.876ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
107102023-09-22T23:15:25.876ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
107112023-09-22T23:15:25.876ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
107122023-09-22T23:15:25.876ZWARNcrucible: RE:0 Bailing with error
107132023-09-22T23:15:25.876ZINFOcrucible: Crucible stats registered with UUID: 77bc3a9f-f333-44eb-ad87-4ee9f025b577
107142023-09-22T23:15:25.876ZINFOcrucible: Crucible 77bc3a9f-f333-44eb-ad87-4ee9f025b577 has session id: aaeb38c7-4ff1-4b76-b886-a1af536d6ef6
107152023-09-22T23:15:25.877ZINFOcrucible: [0] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) New New New ds_transition to WaitActive
107162023-09-22T23:15:25.877ZINFOcrucible: [0] Transition from New to WaitActive
107172023-09-22T23:15:25.877ZINFOcrucible: [0] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) WaitActive New New ds_transition to WaitQuorum
107182023-09-22T23:15:25.877ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
107192023-09-22T23:15:25.877ZINFOcrucible: [0] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) WaitQuorum New New ds_transition to Active
107202023-09-22T23:15:25.877ZINFOcrucible: [0] Transition from WaitQuorum to Active
107212023-09-22T23:15:25.877ZINFOcrucible: [1] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active New New ds_transition to WaitActive
107222023-09-22T23:15:25.877ZINFOcrucible: [1] Transition from New to WaitActive
107232023-09-22T23:15:25.877ZINFOcrucible: [1] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active WaitActive New ds_transition to WaitQuorum
107242023-09-22T23:15:25.877ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
107252023-09-22T23:15:25.877ZINFOcrucible: [1] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active WaitQuorum New ds_transition to Active
107262023-09-22T23:15:25.877ZINFOcrucible: [1] Transition from WaitQuorum to Active
107272023-09-22T23:15:25.877ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active New ds_transition to WaitActive
107282023-09-22T23:15:25.877ZINFOcrucible: [2] Transition from New to WaitActive
107292023-09-22T23:15:25.877ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active WaitActive ds_transition to WaitQuorum
107302023-09-22T23:15:25.877ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
107312023-09-22T23:15:25.877ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active WaitQuorum ds_transition to Active
107322023-09-22T23:15:25.877ZINFOcrucible: [2] Transition from WaitQuorum to Active
107332023-09-22T23:15:25.877ZINFOcrucible: 77bc3a9f-f333-44eb-ad87-4ee9f025b577 is now active with session: 579d228d-6578-4a36-9875-7a6297b92e8b
107342023-09-22T23:15:25.877ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active Active ds_transition to Faulted
107352023-09-22T23:15:25.877ZINFOcrucible: [2] Transition from Active to Faulted
107362023-09-22T23:15:25.877ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active Faulted ds_transition to LiveRepairReady
107372023-09-22T23:15:25.877ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
107382023-09-22T23:15:25.877ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active LiveRepairReady ds_transition to LiveRepair
107392023-09-22T23:15:25.877ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
107402023-09-22T23:15:25.877ZINFOcrucible: Waiting for Close + ReOpen jobs
107412023-09-22T23:15:25.877ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
107422023-09-22T23:15:25.877ZINFOcrucible: RE:0 close id:1000 queued, notify DS
107432023-09-22T23:15:25.877ZINFOcrucible: RE:0 Wait for result from close command 1000:1
107442023-09-22T23:15:26.740ZINFOcrucible: Now move the NoOp job forward
107452023-09-22T23:15:26.740ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107462023-09-22T23:15:26.740ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107472023-09-22T23:15:26.740ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
107482023-09-22T23:15:26.740ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
107492023-09-22T23:15:26.740ZINFOcrucible: [2] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active LiveRepair Active ds_transition to Faulted
107502023-09-22T23:15:26.740ZINFOcrucible: [2] Transition from Active to Faulted
107512023-09-22T23:15:26.740ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
107522023-09-22T23:15:26.740ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
107532023-09-22T23:15:26.740ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
107542023-09-22T23:15:26.740ZINFOcrucible: [1] 9a789d93-ee2b-4070-ad87-88d89554e7ff (059a98cb-d0c4-4eed-b294-64d16509a464) Active LiveRepair Faulted ds_transition to Faulted
107552023-09-22T23:15:26.740ZINFOcrucible: [1] Transition from LiveRepair to Faulted
107562023-09-22T23:15:26.740ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
107572023-09-22T23:15:26.878ZINFOcrucible: Now ACK the close job
107582023-09-22T23:15:26.878ZINFOcrucible: Waiting for 3 jobs (currently 2)
107592023-09-22T23:15:26.878ZINFOcrucible: No repair needed for extent 0 = downstairs
107602023-09-22T23:15:26.878ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
107612023-09-22T23:15:27.880ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107622023-09-22T23:15:27.880ZERROcrucible: [2] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107632023-09-22T23:15:27.880ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
107642023-09-22T23:15:27.880ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
107652023-09-22T23:15:27.880ZINFOcrucible: [2] 77bc3a9f-f333-44eb-ad87-4ee9f025b577 (579d228d-6578-4a36-9875-7a6297b92e8b) Active Active LiveRepair ds_transition to Faulted
107662023-09-22T23:15:27.880ZINFOcrucible: [2] Transition from LiveRepair to Faulted
107672023-09-22T23:15:27.880ZINFOcrucible: Waiting for 4 jobs (currently 3)
107682023-09-22T23:15:27.880ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
107692023-09-22T23:15:27.880ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
107702023-09-22T23:15:28.582ZINFOcrucible: responded to ping downstairs = 1
107712023-09-22T23:15:28.584ZINFOcrucible: responded to ping downstairs = 1
107722023-09-22T23:15:28.741ZINFOcrucible: Finally, move the ReOpen job forward
107732023-09-22T23:15:28.741ZINFOcrucible: Now ACK the reopen job
107742023-09-22T23:15:28.741ZWARNcrucible: RE:0 Bailing with error
107752023-09-22T23:15:28.741ZINFOcrucible: err:2 or:1
107762023-09-22T23:15:28.741ZINFOcrucible: Crucible stats registered with UUID: 46ceccb1-8e43-4e30-90b6-a8d90c7cea97
107772023-09-22T23:15:28.741ZINFOcrucible: Crucible 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 has session id: de9240d0-ab6d-4567-a2ad-e7a003b12882
107782023-09-22T23:15:28.741ZINFOcrucible: [0] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) New New New ds_transition to WaitActive
107792023-09-22T23:15:28.741ZINFOcrucible: [0] Transition from New to WaitActive
107802023-09-22T23:15:28.741ZINFOcrucible: [0] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) WaitActive New New ds_transition to WaitQuorum
107812023-09-22T23:15:28.741ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
107822023-09-22T23:15:28.741ZINFOcrucible: [0] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) WaitQuorum New New ds_transition to Active
107832023-09-22T23:15:28.741ZINFOcrucible: [0] Transition from WaitQuorum to Active
107842023-09-22T23:15:28.742ZINFOcrucible: [1] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active New New ds_transition to WaitActive
107852023-09-22T23:15:28.742ZINFOcrucible: [1] Transition from New to WaitActive
107862023-09-22T23:15:28.742ZINFOcrucible: [1] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active WaitActive New ds_transition to WaitQuorum
107872023-09-22T23:15:28.742ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
107882023-09-22T23:15:28.742ZINFOcrucible: [1] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active WaitQuorum New ds_transition to Active
107892023-09-22T23:15:28.742ZINFOcrucible: [1] Transition from WaitQuorum to Active
107902023-09-22T23:15:28.742ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active New ds_transition to WaitActive
107912023-09-22T23:15:28.742ZINFOcrucible: [2] Transition from New to WaitActive
107922023-09-22T23:15:28.742ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active WaitActive ds_transition to WaitQuorum
107932023-09-22T23:15:28.742ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
107942023-09-22T23:15:28.742ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active WaitQuorum ds_transition to Active
107952023-09-22T23:15:28.742ZINFOcrucible: [2] Transition from WaitQuorum to Active
107962023-09-22T23:15:28.742ZINFOcrucible: 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 is now active with session: c4c9ffcf-62b7-4d90-8f2e-3be934889a7f
107972023-09-22T23:15:28.742ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active Active ds_transition to Faulted
107982023-09-22T23:15:28.742ZINFOcrucible: [2] Transition from Active to Faulted
107992023-09-22T23:15:28.742ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active Faulted ds_transition to LiveRepairReady
108002023-09-22T23:15:28.742ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
108012023-09-22T23:15:28.742ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active LiveRepairReady ds_transition to LiveRepair
108022023-09-22T23:15:28.742ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
108032023-09-22T23:15:28.742ZINFOcrucible: Waiting for Close + ReOpen jobs
108042023-09-22T23:15:28.742ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
108052023-09-22T23:15:28.742ZINFOcrucible: RE:0 close id:1000 queued, notify DS
108062023-09-22T23:15:28.742ZINFOcrucible: RE:0 Wait for result from close command 1000:1
108072023-09-22T23:15:28.881ZINFOcrucible: Now move the NoOp job forward
108082023-09-22T23:15:28.881ZINFOcrucible: Now ACK the NoOp job
108092023-09-22T23:15:28.881ZINFOcrucible: Finally, move the ReOpen job forward
108102023-09-22T23:15:28.881ZINFOcrucible: Now ACK the Reopen job
108112023-09-22T23:15:28.881ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
108122023-09-22T23:15:28.881ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
108132023-09-22T23:15:28.881ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
108142023-09-22T23:15:28.881ZWARNcrucible: RE:0 Bailing with error
10815 test live_repair::repair_test::test_repair_extent_repair_fails_all ... ok
108162023-09-22T23:15:29.743ZINFOcrucible: Waiting for 3 jobs (currently 2)
108172023-09-22T23:15:29.743ZINFOcrucible: No repair needed for extent 0 = downstairs
108182023-09-22T23:15:29.743ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
108192023-09-22T23:15:30.744ZINFOcrucible: Waiting for 4 jobs (currently 3)
108202023-09-22T23:15:30.744ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
108212023-09-22T23:15:31.745ZINFOcrucible: Now move the NoOp job forward
108222023-09-22T23:15:31.745ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
108232023-09-22T23:15:31.745ZERROcrucible: [0] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
108242023-09-22T23:15:31.745ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
108252023-09-22T23:15:31.745ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
108262023-09-22T23:15:31.745ZINFOcrucible: [0] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Active Active LiveRepair ds_transition to Faulted
108272023-09-22T23:15:31.745ZINFOcrucible: [0] Transition from Active to Faulted
108282023-09-22T23:15:31.745ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
108292023-09-22T23:15:31.745ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
108302023-09-22T23:15:31.745ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
108312023-09-22T23:15:31.745ZINFOcrucible: [2] 46ceccb1-8e43-4e30-90b6-a8d90c7cea97 (c4c9ffcf-62b7-4d90-8f2e-3be934889a7f) Faulted Active LiveRepair ds_transition to Faulted
108322023-09-22T23:15:31.745ZINFOcrucible: [2] Transition from LiveRepair to Faulted
108332023-09-22T23:15:31.745ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
108342023-09-22T23:15:33.746ZINFOcrucible: Finally, move the ReOpen job forward
108352023-09-22T23:15:33.746ZINFOcrucible: Now ACK the reopen job
108362023-09-22T23:15:33.746ZWARNcrucible: RE:0 Bailing with error
108372023-09-22T23:15:33.746ZINFOcrucible: err:0 or:2
108382023-09-22T23:15:33.747ZINFOcrucible: Crucible stats registered with UUID: 50bf3ac1-5436-4030-be9f-16763e0c8bad
108392023-09-22T23:15:33.747ZINFOcrucible: Crucible 50bf3ac1-5436-4030-be9f-16763e0c8bad has session id: 9b00af65-f93d-4019-ab40-d9c93fcb3b99
108402023-09-22T23:15:33.747ZINFOcrucible: [0] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) New New New ds_transition to WaitActive
108412023-09-22T23:15:33.747ZINFOcrucible: [0] Transition from New to WaitActive
108422023-09-22T23:15:33.747ZINFOcrucible: [0] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) WaitActive New New ds_transition to WaitQuorum
108432023-09-22T23:15:33.747ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
108442023-09-22T23:15:33.747ZINFOcrucible: [0] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) WaitQuorum New New ds_transition to Active
108452023-09-22T23:15:33.747ZINFOcrucible: [0] Transition from WaitQuorum to Active
108462023-09-22T23:15:33.747ZINFOcrucible: [1] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active New New ds_transition to WaitActive
108472023-09-22T23:15:33.747ZINFOcrucible: [1] Transition from New to WaitActive
108482023-09-22T23:15:33.747ZINFOcrucible: [1] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active WaitActive New ds_transition to WaitQuorum
108492023-09-22T23:15:33.747ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
108502023-09-22T23:15:33.747ZINFOcrucible: [1] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active WaitQuorum New ds_transition to Active
108512023-09-22T23:15:33.747ZINFOcrucible: [1] Transition from WaitQuorum to Active
108522023-09-22T23:15:33.747ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active New ds_transition to WaitActive
108532023-09-22T23:15:33.747ZINFOcrucible: [2] Transition from New to WaitActive
108542023-09-22T23:15:33.747ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active WaitActive ds_transition to WaitQuorum
108552023-09-22T23:15:33.747ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
108562023-09-22T23:15:33.747ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active WaitQuorum ds_transition to Active
108572023-09-22T23:15:33.747ZINFOcrucible: [2] Transition from WaitQuorum to Active
108582023-09-22T23:15:33.747ZINFOcrucible: 50bf3ac1-5436-4030-be9f-16763e0c8bad is now active with session: 3b5addfa-5709-424a-bddc-395f274761b4
108592023-09-22T23:15:33.747ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active Active ds_transition to Faulted
108602023-09-22T23:15:33.747ZINFOcrucible: [2] Transition from Active to Faulted
108612023-09-22T23:15:33.747ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active Faulted ds_transition to LiveRepairReady
108622023-09-22T23:15:33.747ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
108632023-09-22T23:15:33.747ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active LiveRepairReady ds_transition to LiveRepair
108642023-09-22T23:15:33.748ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
108652023-09-22T23:15:33.748ZINFOcrucible: Waiting for Close + ReOpen jobs
108662023-09-22T23:15:33.748ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
108672023-09-22T23:15:33.748ZINFOcrucible: RE:0 close id:1000 queued, notify DS
108682023-09-22T23:15:33.748ZINFOcrucible: RE:0 Wait for result from close command 1000:1
108692023-09-22T23:15:34.748ZINFOcrucible: Waiting for 3 jobs (currently 2)
108702023-09-22T23:15:34.748ZINFOcrucible: No repair needed for extent 0 = downstairs
108712023-09-22T23:15:34.748ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
108722023-09-22T23:15:35.749ZINFOcrucible: Waiting for 4 jobs (currently 3)
108732023-09-22T23:15:35.749ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
108742023-09-22T23:15:36.750ZINFOcrucible: Now move the NoOp job forward
108752023-09-22T23:15:36.750ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
108762023-09-22T23:15:36.750ZERROcrucible: [1] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
108772023-09-22T23:15:36.750ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
108782023-09-22T23:15:36.751ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
108792023-09-22T23:15:36.751ZINFOcrucible: [1] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Active LiveRepair ds_transition to Faulted
108802023-09-22T23:15:36.751ZINFOcrucible: [1] Transition from Active to Faulted
108812023-09-22T23:15:36.751ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
108822023-09-22T23:15:36.751ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
108832023-09-22T23:15:36.751ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
108842023-09-22T23:15:36.751ZINFOcrucible: [2] 50bf3ac1-5436-4030-be9f-16763e0c8bad (3b5addfa-5709-424a-bddc-395f274761b4) Active Faulted LiveRepair ds_transition to Faulted
108852023-09-22T23:15:36.751ZINFOcrucible: [2] Transition from LiveRepair to Faulted
108862023-09-22T23:15:36.751ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
108872023-09-22T23:15:38.585ZINFOcrucible: responded to ping downstairs = 1
108882023-09-22T23:15:38.587ZINFOcrucible: responded to ping downstairs = 1
108892023-09-22T23:15:38.752ZINFOcrucible: Finally, move the ReOpen job forward
108902023-09-22T23:15:38.752ZINFOcrucible: Now ACK the reopen job
108912023-09-22T23:15:38.752ZWARNcrucible: RE:0 Bailing with error
108922023-09-22T23:15:38.752ZINFOcrucible: err:1 or:2
108932023-09-22T23:15:38.752ZINFOcrucible: Crucible stats registered with UUID: ac0a948c-9d98-4276-8284-4295e9402293
108942023-09-22T23:15:38.752ZINFOcrucible: Crucible ac0a948c-9d98-4276-8284-4295e9402293 has session id: 0f185046-3285-4801-a97b-f4ca49d7f69a
108952023-09-22T23:15:38.753ZINFOcrucible: [0] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) New New New ds_transition to WaitActive
108962023-09-22T23:15:38.753ZINFOcrucible: [0] Transition from New to WaitActive
108972023-09-22T23:15:38.753ZINFOcrucible: [0] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) WaitActive New New ds_transition to WaitQuorum
108982023-09-22T23:15:38.753ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
108992023-09-22T23:15:38.753ZINFOcrucible: [0] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) WaitQuorum New New ds_transition to Active
109002023-09-22T23:15:38.753ZINFOcrucible: [0] Transition from WaitQuorum to Active
109012023-09-22T23:15:38.753ZINFOcrucible: [1] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active New New ds_transition to WaitActive
109022023-09-22T23:15:38.753ZINFOcrucible: [1] Transition from New to WaitActive
109032023-09-22T23:15:38.753ZINFOcrucible: [1] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active WaitActive New ds_transition to WaitQuorum
109042023-09-22T23:15:38.753ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
109052023-09-22T23:15:38.753ZINFOcrucible: [1] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active WaitQuorum New ds_transition to Active
109062023-09-22T23:15:38.753ZINFOcrucible: [1] Transition from WaitQuorum to Active
109072023-09-22T23:15:38.753ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active New ds_transition to WaitActive
109082023-09-22T23:15:38.753ZINFOcrucible: [2] Transition from New to WaitActive
109092023-09-22T23:15:38.753ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active WaitActive ds_transition to WaitQuorum
109102023-09-22T23:15:38.753ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
109112023-09-22T23:15:38.753ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active WaitQuorum ds_transition to Active
109122023-09-22T23:15:38.753ZINFOcrucible: [2] Transition from WaitQuorum to Active
109132023-09-22T23:15:38.753ZINFOcrucible: ac0a948c-9d98-4276-8284-4295e9402293 is now active with session: bb779cc3-1172-4b3f-b5ea-1d842045ac13
109142023-09-22T23:15:38.753ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active Active ds_transition to Faulted
109152023-09-22T23:15:38.753ZINFOcrucible: [2] Transition from Active to Faulted
109162023-09-22T23:15:38.753ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active Faulted ds_transition to LiveRepairReady
109172023-09-22T23:15:38.753ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
109182023-09-22T23:15:38.753ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active LiveRepairReady ds_transition to LiveRepair
109192023-09-22T23:15:38.753ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
109202023-09-22T23:15:38.753ZINFOcrucible: Waiting for Close + ReOpen jobs
109212023-09-22T23:15:38.753ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
109222023-09-22T23:15:38.753ZINFOcrucible: RE:0 close id:1000 queued, notify DS
109232023-09-22T23:15:38.753ZINFOcrucible: RE:0 Wait for result from close command 1000:1
109242023-09-22T23:15:39.754ZINFOcrucible: Waiting for 3 jobs (currently 2)
109252023-09-22T23:15:39.754ZINFOcrucible: No repair needed for extent 0 = downstairs
109262023-09-22T23:15:39.754ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
109272023-09-22T23:15:40.755ZINFOcrucible: Waiting for 4 jobs (currently 3)
109282023-09-22T23:15:40.755ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
109292023-09-22T23:15:41.756ZINFOcrucible: Now move the NoOp job forward
109302023-09-22T23:15:41.756ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
109312023-09-22T23:15:41.756ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
109322023-09-22T23:15:41.756ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
109332023-09-22T23:15:41.756ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
109342023-09-22T23:15:41.756ZINFOcrucible: [2] ac0a948c-9d98-4276-8284-4295e9402293 (bb779cc3-1172-4b3f-b5ea-1d842045ac13) Active Active LiveRepair ds_transition to Faulted
109352023-09-22T23:15:41.756ZINFOcrucible: [2] Transition from LiveRepair to Faulted
109362023-09-22T23:15:41.756ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
109372023-09-22T23:15:41.756ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
109382023-09-22T23:15:43.757ZINFOcrucible: Finally, move the ReOpen job forward
109392023-09-22T23:15:43.757ZINFOcrucible: Now ACK the reopen job
109402023-09-22T23:15:43.757ZWARNcrucible: RE:0 Bailing with error
10941 test live_repair::repair_test::test_repair_extent_fail_noop_all ... ok
109422023-09-22T23:15:48.586ZINFOcrucible: responded to ping downstairs = 1
109432023-09-22T23:15:48.588ZINFOcrucible: responded to ping downstairs = 1
10944 test dummy_downstairs_tests::protocol_test::test_error_during_live_repair_no_halt has been running for over 60 seconds
10945 test dummy_downstairs_tests::protocol_test::test_successful_live_repair has been running for over 60 seconds
109462023-09-22T23:15:58.588ZINFOcrucible: responded to ping downstairs = 1
109472023-09-22T23:15:58.594ZINFOcrucible: responded to ping downstairs = 1
109482023-09-22T23:16:08.593ZINFOcrucible: responded to ping downstairs = 1
109492023-09-22T23:16:08.599ZINFOcrucible: responded to ping downstairs = 1
109502023-09-22T23:16:18.595ZINFOcrucible: responded to ping downstairs = 1
109512023-09-22T23:16:18.605ZINFOcrucible: responded to ping downstairs = 1
109522023-09-22T23:16:28.598ZINFOcrucible: responded to ping downstairs = 1
109532023-09-22T23:16:28.608ZINFOcrucible: responded to ping downstairs = 1
109542023-09-22T23:16:38.600ZINFOcrucible: responded to ping downstairs = 1
109552023-09-22T23:16:38.613ZINFOcrucible: responded to ping downstairs = 1
109562023-09-22T23:16:48.606ZINFOcrucible: responded to ping downstairs = 1
109572023-09-22T23:16:48.614ZINFOcrucible: responded to ping downstairs = 1
109582023-09-22T23:16:58.610ZINFOcrucible: responded to ping downstairs = 1
109592023-09-22T23:16:58.621ZINFOcrucible: responded to ping downstairs = 1
109602023-09-22T23:17:08.615ZINFOcrucible: responded to ping downstairs = 1
109612023-09-22T23:17:08.625ZINFOcrucible: responded to ping downstairs = 1
109622023-09-22T23:17:18.620ZINFOcrucible: responded to ping downstairs = 1
109632023-09-22T23:17:18.631ZINFOcrucible: responded to ping downstairs = 1
109642023-09-22T23:17:28.629ZINFOcrucible: responded to ping downstairs = 1
109652023-09-22T23:17:28.637ZINFOcrucible: responded to ping downstairs = 1
109662023-09-22T23:17:38.637ZINFOcrucible: responded to ping downstairs = 1
109672023-09-22T23:17:38.640ZINFOcrucible: responded to ping downstairs = 1
109682023-09-22T23:17:48.642ZINFOcrucible: responded to ping downstairs = 1
109692023-09-22T23:17:48.644ZINFOcrucible: responded to ping downstairs = 1
109702023-09-22T23:17:58.648ZINFOcrucible: responded to ping downstairs = 1
109712023-09-22T23:17:58.654ZINFOcrucible: responded to ping downstairs = 1
109722023-09-22T23:18:08.653ZINFOcrucible: responded to ping downstairs = 1
109732023-09-22T23:18:08.656ZINFOcrucible: responded to ping downstairs = 1
109742023-09-22T23:18:18.662ZINFOcrucible: responded to ping downstairs = 1
109752023-09-22T23:18:18.663ZINFOcrucible: responded to ping downstairs = 1
109762023-09-22T23:18:28.664ZINFOcrucible: responded to ping downstairs = 1
109772023-09-22T23:18:28.666ZINFOcrucible: responded to ping downstairs = 1
109782023-09-22T23:18:38.668ZINFOcrucible: responded to ping downstairs = 1
109792023-09-22T23:18:38.675ZINFOcrucible: responded to ping downstairs = 1
109802023-09-22T23:18:48.670ZINFOcrucible: responded to ping downstairs = 1
109812023-09-22T23:18:48.676ZINFOcrucible: responded to ping downstairs = 1
109822023-09-22T23:18:58.678ZINFOcrucible: responded to ping downstairs = 1
109832023-09-22T23:18:58.687ZINFOcrucible: responded to ping downstairs = 1
109842023-09-22T23:19:08.689ZINFOcrucible: responded to ping downstairs = 1
109852023-09-22T23:19:08.699ZINFOcrucible: responded to ping downstairs = 1
109862023-09-22T23:19:18.696ZINFOcrucible: responded to ping downstairs = 1
109872023-09-22T23:19:18.701ZINFOcrucible: responded to ping downstairs = 1
109882023-09-22T23:19:28.707ZINFOcrucible: responded to ping downstairs = 1
109892023-09-22T23:19:28.708ZINFOcrucible: responded to ping downstairs = 1
109902023-09-22T23:19:38.718ZINFOcrucible: responded to ping downstairs = 1
109912023-09-22T23:19:38.719ZINFOcrucible: responded to ping downstairs = 1
109922023-09-22T23:19:48.723ZINFOcrucible: responded to ping downstairs = 1
109932023-09-22T23:19:48.725ZINFOcrucible: responded to ping downstairs = 1
109942023-09-22T23:19:58.728ZINFOcrucible: responded to ping downstairs = 1
109952023-09-22T23:19:58.733ZINFOcrucible: responded to ping downstairs = 1
109962023-09-22T23:20:08.739ZINFOcrucible: responded to ping downstairs = 1
109972023-09-22T23:20:08.742ZINFOcrucible: responded to ping downstairs = 1
109982023-09-22T23:20:18.743ZINFOcrucible: responded to ping {downstairs = 1
10999 "msg":"responded to ping","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:20:18.743880475Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4759,"downstairs":1}
110002023-09-22T23:20:28.746ZINFOcrucible: responded to ping downstairs = 1
110012023-09-22T23:20:28.747ZINFOcrucible: responded to ping downstairs = 1
110022023-09-22T23:20:38.757ZINFOcrucible: responded to ping downstairs = 1
110032023-09-22T23:20:38.758ZINFOcrucible: responded to ping downstairs = 1
110042023-09-22T23:20:48.763ZINFOcrucible: responded to ping downstairs = 1
110052023-09-22T23:20:48.769ZINFOcrucible: responded to ping downstairs = 1
110062023-09-22T23:20:58.766ZINFOcrucible: responded to ping downstairs = 1
110072023-09-22T23:20:58.774ZINFOcrucible: responded to ping downstairs = 1
110082023-09-22T23:21:08.770ZINFOcrucible: responded to ping downstairs = 1
110092023-09-22T23:21:08.778ZINFOcrucible: responded to ping downstairs = 1
110102023-09-22T23:21:17.262ZWARNcrucible: [up] downstairs 0 failed, too many outstanding jobs 57001 upstairs = 1
110112023-09-22T23:21:17.262ZINFOcrucible: [0] client skip 57001 in process jobs because fault = downstairs upstairs = 1
110122023-09-22T23:21:17.331ZINFOcrucible: [0] changed 57001 jobs to fault skipped = downstairs upstairs = 1
110132023-09-22T23:21:17.420ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) Active Active Active ds_transition to Faulted upstairs = 1
110142023-09-22T23:21:17.420ZINFOcrucible: [0] Transition from Active to Faulted upstairs = 1
110152023-09-22T23:21:17.640ZWARNcrucible: [0] flow control end upstairs = 1
110162023-09-22T23:21:18.782ZINFOcrucible: responded to ping downstairs = 1
110172023-09-22T23:21:18.782ZWARNcrucible: [0] will exit pm_task, this downstairs Faulted upstairs = 1
110182023-09-22T23:21:18.782ZERROcrucible: 127.0.0.1:39040: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Faulted)), so we end too looper = 0 upstairs = 1
110192023-09-22T23:21:18.782ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 Gone missing, transition from Faulted to Faulted upstairs = 1
110202023-09-22T23:21:18.782ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 connection to 127.0.0.1:39040 closed looper = 0 upstairs = 1
110212023-09-22T23:21:18.782ZINFOcrucible: [0] 127.0.0.1:39040 task reports connection:false upstairs = 1
110222023-09-22T23:21:18.782ZINFOcrucible: 85682a66-27fd-403c-bf22-d04b4ff66805 Faulted Active Active upstairs = 1
110232023-09-22T23:21:18.782ZINFOcrucible: [0] 127.0.0.1:39040 task reports offline upstairs = 1
110242023-09-22T23:21:18.782ZERROcrucible: spawn_message_receiver saw disconnect, bailing downstairs = 1
110252023-09-22T23:21:18.791ZINFOcrucible: responded to ping downstairs = 1
110262023-09-22T23:21:19.793ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 looper connected looper = 0 upstairs = 1
110272023-09-22T23:21:19.793ZINFOcrucible: [0] Proc runs for 127.0.0.1:39040 in state Faulted upstairs = 1
110282023-09-22T23:21:19.805ZWARNcrucible: [up] downstairs 0 failed, too many outstanding jobs 57001 upstairs = 1
110292023-09-22T23:21:19.805ZINFOcrucible: [0] client skip 57001 in process jobs because fault = downstairs upstairs = 1
110302023-09-22T23:21:19.873ZINFOcrucible: [0] changed 57001 jobs to fault skipped = downstairs upstairs = 1
110312023-09-22T23:21:19.961ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) Active Active Active ds_transition to Faulted upstairs = 1
110322023-09-22T23:21:19.961ZINFOcrucible: [0] Transition from Active to Faulted upstairs = 1
110332023-09-22T23:21:20.465ZWARNcrucible: [0] flow control end upstairs = 1
110342023-09-22T23:21:29.483ZERROcrucible: could not send read response for job 1 = 1001: Broken pipe (os error 32)
110352023-09-22T23:21:29.484ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
110362023-09-22T23:21:29.484ZINFOcrucible: responded to ping downstairs = 1
110372023-09-22T23:21:29.484ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session 5d828379-bf79-455f-9270-e82849f9ecba upstairs = 1
110382023-09-22T23:21:29.484ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 85682a66-27fd-403c-bf22-d04b4ff66805, session_id: 5d828379-bf79-455f-9270-e82849f9ecba, gen: 1 } downstairs = 1
110392023-09-22T23:21:29.484ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
110402023-09-22T23:21:29.484ZINFOcrucible: [0] downstairs client at 127.0.0.1:39040 has UUID b0da680b-dcef-4405-a0bc-9a419607b615 upstairs = 1
110412023-09-22T23:21:29.484ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: b0da680b-dcef-4405-a0bc-9a419607b615, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
110422023-09-22T23:21:29.484ZINFOcrucible: Returning client:0 UUID:b0da680b-dcef-4405-a0bc-9a419607b615 matches upstairs = 1
110432023-09-22T23:21:29.484ZINFOcrucible: 85682a66-27fd-403c-bf22-d04b4ff66805 Faulted Active Active upstairs = 1
110442023-09-22T23:21:29.484ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
110452023-09-22T23:21:29.484ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) Faulted Active Active ds_transition to LiveRepairReady upstairs = 1
110462023-09-22T23:21:29.484ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady upstairs = 1
110472023-09-22T23:21:29.484ZWARNcrucible: [0] new RM replaced this: Some(RegionMetadata { generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dirty: [false, false, false, false, false, false, false, false, false, false] }) upstairs = 1
110482023-09-22T23:21:29.484ZWARNcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 Enter Ready for LiveRepair mode upstairs = 1
110492023-09-22T23:21:29.484ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
110502023-09-22T23:21:29.485ZINFOcrucible: [0] 127.0.0.1:39040 task reports connection:true upstairs = 1
110512023-09-22T23:21:29.485ZINFOcrucible: 85682a66-27fd-403c-bf22-d04b4ff66805 LiveRepairReady Active Active upstairs = 1
110522023-09-22T23:21:29.485ZINFOcrucible: Set check for repair upstairs = 1
110532023-09-22T23:21:30.231ZINFOcrucible: responded to ping downstairs = 2
110542023-09-22T23:21:30.231ZINFOcrucible: responded to ping downstairs = 3
110552023-09-22T23:21:30.487ZINFOcrucible: Checking if live repair is needed upstairs = 1
110562023-09-22T23:21:30.487ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) LiveRepairReady Active Active ds_transition to LiveRepair upstairs = 1
110572023-09-22T23:21:30.487ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair upstairs = 1
110582023-09-22T23:21:30.487ZINFOcrucible: Live Repair started upstairs = 1
110592023-09-22T23:21:30.487ZWARNcrucible: Live Repair main task begins. task = repair upstairs = 1
110602023-09-22T23:21:30.487ZINFOcrucible: Start Live Repair of extents 0 to 10 task = repair upstairs = 1
110612023-09-22T23:21:30.487ZINFOcrucible: Start extent 0 repair task = repair upstairs = 1
110622023-09-22T23:21:30.487ZINFOcrucible: RE:0 repair extent with ids 58201,58202,58203,58204 deps:[] upstairs = 1
110632023-09-22T23:21:30.487ZINFOcrucible: RE:0 close id:58201 queued, notify DS upstairs = 1
110642023-09-22T23:21:30.487ZINFOcrucible: RE:0 Wait for result from close command 58201:57202 upstairs = 1
110652023-09-22T23:21:30.487ZINFOcrucible: [0] 58201 final dependency list [] = downstairs upstairs = 1
110662023-09-22T23:21:30.487ZINFOcrucible: [0] 58204 final dependency list [JobId(58201), JobId(58202), JobId(58203)] = downstairs upstairs = 1
110672023-09-22T23:21:30.488ZINFOcrucible: [0] 58205 final dependency list [JobId(58204), JobId(58201)] = downstairs upstairs = 1
110682023-09-22T23:21:30.488ZWARNcrucible: Write to Extent 0:0:9 under repair upstairs = 1
110692023-09-22T23:21:30.488ZINFOcrucible: [0] 58206 final dependency list [JobId(58205), JobId(58204), JobId(58201)] = downstairs upstairs = 1
110702023-09-22T23:21:30.497ZINFOcrucible: Repair for extent 0 s:2 d:[ClientId(0)] = downstairs upstairs = 1
110712023-09-22T23:21:30.497ZINFOcrucible: RE:0 Wait for result from repair command 58202:57203 upstairs = 1
110722023-09-22T23:21:30.497ZINFOcrucible: [0] 58202 final dependency list [JobId(58201)] = downstairs upstairs = 1
110732023-09-22T23:21:30.498ZINFOcrucible: RE:0 Wait for result from NoOp command 58203:57204 upstairs = 1
110742023-09-22T23:21:30.498ZINFOcrucible: [0] 58203 final dependency list [JobId(58201), JobId(58202)] = downstairs upstairs = 1
110752023-09-22T23:21:30.500ZINFOcrucible: RE:0 Wait for result from reopen command 58204:57205 upstairs = 1
110762023-09-22T23:21:30.500ZINFOcrucible: Start extent 1 repair task = repair upstairs = 1
110772023-09-22T23:21:30.500ZINFOcrucible: RE:1 repair extent with ids 58225,58226,58227,58228 deps:[JobId(58208), JobId(58207)] upstairs = 1
110782023-09-22T23:21:30.500ZINFOcrucible: RE:1 close id:58225 queued, notify DS upstairs = 1
110792023-09-22T23:21:30.500ZINFOcrucible: RE:1 Wait for result from close command 58225:57226 upstairs = 1
110802023-09-22T23:21:30.501ZINFOcrucible: [0] 58225 final dependency list [] = downstairs upstairs = 1
110812023-09-22T23:21:30.501ZINFOcrucible: [0] 58228 final dependency list [JobId(58225), JobId(58226), JobId(58227)] = downstairs upstairs = 1
110822023-09-22T23:21:30.501ZINFOcrucible: [0] 58229 final dependency list [JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110832023-09-22T23:21:30.502ZINFOcrucible: [0] 58230 final dependency list [JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110842023-09-22T23:21:30.502ZINFOcrucible: [0] 58231 final dependency list [JobId(58228), JobId(58225)] = downstairs upstairs = 1
110852023-09-22T23:21:30.503ZWARNcrucible: Write to Extent 1:0:9 under repair upstairs = 1
110862023-09-22T23:21:30.503ZINFOcrucible: [0] 58232 final dependency list [JobId(58231), JobId(58228), JobId(58225)] = downstairs upstairs = 1
110872023-09-22T23:21:30.511ZINFOcrucible: Repair for extent 1 s:2 d:[ClientId(0)] = downstairs upstairs = 1
110882023-09-22T23:21:30.511ZINFOcrucible: RE:1 Wait for result from repair command 58226:57227 upstairs = 1
110892023-09-22T23:21:30.511ZINFOcrucible: [0] 58226 final dependency list [JobId(58225)] = downstairs upstairs = 1
110902023-09-22T23:21:30.511ZINFOcrucible: RE:1 Wait for result from NoOp command 58227:57228 upstairs = 1
110912023-09-22T23:21:30.511ZINFOcrucible: [0] 58227 final dependency list [JobId(58225), JobId(58226)] = downstairs upstairs = 1
110922023-09-22T23:21:30.514ZINFOcrucible: RE:1 Wait for result from reopen command 58228:57229 upstairs = 1
110932023-09-22T23:21:30.514ZINFOcrucible: Start extent 2 repair task = repair upstairs = 1
110942023-09-22T23:21:30.514ZINFOcrucible: RE:2 repair extent with ids 58249,58250,58251,58252 deps:[JobId(58234), JobId(58233), JobId(58210), JobId(58209)] upstairs = 1
110952023-09-22T23:21:30.514ZINFOcrucible: RE:2 close id:58249 queued, notify DS upstairs = 1
110962023-09-22T23:21:30.514ZINFOcrucible: RE:2 Wait for result from close command 58249:57250 upstairs = 1
110972023-09-22T23:21:30.514ZINFOcrucible: [0] 58249 final dependency list [] = downstairs upstairs = 1
110982023-09-22T23:21:30.514ZINFOcrucible: [0] 58252 final dependency list [JobId(58249), JobId(58250), JobId(58251)] = downstairs upstairs = 1
110992023-09-22T23:21:30.515ZINFOcrucible: [0] 58253 final dependency list [JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111002023-09-22T23:21:30.515ZINFOcrucible: [0] 58254 final dependency list [JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111012023-09-22T23:21:30.516ZINFOcrucible: [0] 58255 final dependency list [JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111022023-09-22T23:21:30.516ZINFOcrucible: [0] 58256 final dependency list [JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111032023-09-22T23:21:30.517ZINFOcrucible: [0] 58257 final dependency list [JobId(58252), JobId(58249)] = downstairs upstairs = 1
111042023-09-22T23:21:30.517ZWARNcrucible: Write to Extent 2:0:9 under repair upstairs = 1
111052023-09-22T23:21:30.518ZINFOcrucible: [0] 58258 final dependency list [JobId(58257), JobId(58252), JobId(58249)] = downstairs upstairs = 1
111062023-09-22T23:21:30.525ZINFOcrucible: Repair for extent 2 s:2 d:[ClientId(0)] = downstairs upstairs = 1
111072023-09-22T23:21:30.525ZINFOcrucible: RE:2 Wait for result from repair command 58250:57251 upstairs = 1
111082023-09-22T23:21:30.525ZINFOcrucible: [0] 58250 final dependency list [JobId(58249)] = downstairs upstairs = 1
111092023-09-22T23:21:30.525ZINFOcrucible: RE:2 Wait for result from NoOp command 58251:57252 upstairs = 1
111102023-09-22T23:21:30.525ZINFOcrucible: [0] 58251 final dependency list [JobId(58249), JobId(58250)] = downstairs upstairs = 1
111112023-09-22T23:21:30.528ZINFOcrucible: RE:2 Wait for result from reopen command 58252:57253 upstairs = 1
111122023-09-22T23:21:30.528ZINFOcrucible: Start extent 3 repair task = repair upstairs = 1
111132023-09-22T23:21:30.528ZINFOcrucible: RE:3 repair extent with ids 58273,58274,58275,58276 deps:[JobId(58260), JobId(58259), JobId(58236), JobId(58235), JobId(58212), JobId(58211)] upstairs = 1
111142023-09-22T23:21:30.528ZINFOcrucible: RE:3 close id:58273 queued, notify DS upstairs = 1
111152023-09-22T23:21:30.528ZINFOcrucible: RE:3 Wait for result from close command 58273:57274 upstairs = 1
111162023-09-22T23:21:30.528ZINFOcrucible: [0] 58273 final dependency list [] = downstairs upstairs = 1
111172023-09-22T23:21:30.528ZINFOcrucible: [0] 58276 final dependency list [JobId(58273), JobId(58274), JobId(58275)] = downstairs upstairs = 1
111182023-09-22T23:21:30.529ZINFOcrucible: [0] 58277 final dependency list [JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111192023-09-22T23:21:30.530ZINFOcrucible: [0] 58278 final dependency list [JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111202023-09-22T23:21:30.530ZINFOcrucible: [0] 58279 final dependency list [JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111212023-09-22T23:21:30.531ZINFOcrucible: [0] 58280 final dependency list [JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111222023-09-22T23:21:30.531ZINFOcrucible: [0] 58281 final dependency list [JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111232023-09-22T23:21:30.532ZINFOcrucible: [0] 58282 final dependency list [JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111242023-09-22T23:21:30.532ZINFOcrucible: [0] 58283 final dependency list [JobId(58276), JobId(58273)] = downstairs upstairs = 1
111252023-09-22T23:21:30.533ZWARNcrucible: Write to Extent 3:0:9 under repair upstairs = 1
111262023-09-22T23:21:30.533ZINFOcrucible: [0] 58284 final dependency list [JobId(58283), JobId(58276), JobId(58273)] = downstairs upstairs = 1
111272023-09-22T23:21:30.539ZINFOcrucible: Repair for extent 3 s:2 d:[ClientId(0)] = downstairs upstairs = 1
111282023-09-22T23:21:30.539ZINFOcrucible: RE:3 Wait for result from repair command 58274:57275 upstairs = 1
111292023-09-22T23:21:30.539ZINFOcrucible: [0] 58274 final dependency list [JobId(58273)] = downstairs upstairs = 1
111302023-09-22T23:21:30.540ZINFOcrucible: RE:3 Wait for result from NoOp command 58275:57276 upstairs = 1
111312023-09-22T23:21:30.540ZINFOcrucible: [0] 58275 final dependency list [JobId(58273), JobId(58274)] = downstairs upstairs = 1
111322023-09-22T23:21:30.543ZINFOcrucible: RE:3 Wait for result from reopen command 58276:57277 upstairs = 1
111332023-09-22T23:21:30.543ZINFOcrucible: Start extent 4 repair task = repair upstairs = 1
111342023-09-22T23:21:30.543ZINFOcrucible: RE:4 repair extent with ids 58297,58298,58299,58300 deps:[JobId(58286), JobId(58285), JobId(58262), JobId(58261), JobId(58238), JobId(58237), JobId(58214), JobId(58213)] upstairs = 1
111352023-09-22T23:21:30.543ZINFOcrucible: RE:4 close id:58297 queued, notify DS upstairs = 1
111362023-09-22T23:21:30.543ZINFOcrucible: RE:4 Wait for result from close command 58297:57298 upstairs = 1
111372023-09-22T23:21:30.543ZINFOcrucible: [0] 58297 final dependency list [] = downstairs upstairs = 1
111382023-09-22T23:21:30.543ZINFOcrucible: [0] 58300 final dependency list [JobId(58297), JobId(58298), JobId(58299)] = downstairs upstairs = 1
111392023-09-22T23:21:30.544ZINFOcrucible: [0] 58301 final dependency list [JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111402023-09-22T23:21:30.544ZINFOcrucible: [0] 58302 final dependency list [JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111412023-09-22T23:21:30.545ZINFOcrucible: [0] 58303 final dependency list [JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111422023-09-22T23:21:30.545ZINFOcrucible: [0] 58304 final dependency list [JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111432023-09-22T23:21:30.546ZINFOcrucible: [0] 58305 final dependency list [JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111442023-09-22T23:21:30.547ZINFOcrucible: [0] 58306 final dependency list [JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111452023-09-22T23:21:30.547ZINFOcrucible: [0] 58307 final dependency list [JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111462023-09-22T23:21:30.548ZINFOcrucible: [0] 58308 final dependency list [JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111472023-09-22T23:21:30.548ZINFOcrucible: [0] 58309 final dependency list [JobId(58300), JobId(58297)] = downstairs upstairs = 1
111482023-09-22T23:21:30.549ZWARNcrucible: Write to Extent 4:0:9 under repair upstairs = 1
111492023-09-22T23:21:30.549ZINFOcrucible: [0] 58310 final dependency list [JobId(58309), JobId(58300), JobId(58297)] = downstairs upstairs = 1
111502023-09-22T23:21:30.554ZINFOcrucible: Repair for extent 4 s:2 d:[ClientId(0)] = downstairs upstairs = 1
111512023-09-22T23:21:30.554ZINFOcrucible: RE:4 Wait for result from repair command 58298:57299 upstairs = 1
111522023-09-22T23:21:30.554ZINFOcrucible: [0] 58298 final dependency list [JobId(58297)] = downstairs upstairs = 1
111532023-09-22T23:21:30.555ZINFOcrucible: RE:4 Wait for result from NoOp command 58299:57300 upstairs = 1
111542023-09-22T23:21:30.555ZINFOcrucible: [0] 58299 final dependency list [JobId(58297), JobId(58298)] = downstairs upstairs = 1
111552023-09-22T23:21:30.558ZINFOcrucible: RE:4 Wait for result from reopen command 58300:57301 upstairs = 1
111562023-09-22T23:21:30.558ZINFOcrucible: Start extent 5 repair task = repair upstairs = 1
111572023-09-22T23:21:30.558ZINFOcrucible: RE:5 repair extent with ids 58321,58322,58323,58324 deps:[JobId(58312), JobId(58311), JobId(58288), JobId(58287), JobId(58264), JobId(58263), JobId(58240), JobId(58239), JobId(58216), JobId(58215)] upstairs = 1
111582023-09-22T23:21:30.558ZINFOcrucible: RE:5 close id:58321 queued, notify DS upstairs = 1
111592023-09-22T23:21:30.558ZINFOcrucible: RE:5 Wait for result from close command 58321:57322 upstairs = 1
111602023-09-22T23:21:30.558ZINFOcrucible: [0] 58321 final dependency list [] = downstairs upstairs = 1
111612023-09-22T23:21:30.558ZINFOcrucible: [0] 58324 final dependency list [JobId(58321), JobId(58322), JobId(58323)] = downstairs upstairs = 1
111622023-09-22T23:21:30.559ZINFOcrucible: [0] 58325 final dependency list [JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111632023-09-22T23:21:30.559ZINFOcrucible: [0] 58326 final dependency list [JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111642023-09-22T23:21:30.560ZINFOcrucible: [0] 58327 final dependency list [JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111652023-09-22T23:21:30.561ZINFOcrucible: [0] 58328 final dependency list [JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111662023-09-22T23:21:30.561ZINFOcrucible: [0] 58329 final dependency list [JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111672023-09-22T23:21:30.562ZINFOcrucible: [0] 58330 final dependency list [JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111682023-09-22T23:21:30.562ZINFOcrucible: [0] 58331 final dependency list [JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111692023-09-22T23:21:30.563ZINFOcrucible: [0] 58332 final dependency list [JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111702023-09-22T23:21:30.564ZINFOcrucible: [0] 58333 final dependency list [JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111712023-09-22T23:21:30.564ZINFOcrucible: [0] 58334 final dependency list [JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111722023-09-22T23:21:30.565ZINFOcrucible: [0] 58335 final dependency list [JobId(58324), JobId(58321)] = downstairs upstairs = 1
111732023-09-22T23:21:30.565ZWARNcrucible: Write to Extent 5:0:9 under repair upstairs = 1
111742023-09-22T23:21:30.565ZINFOcrucible: [0] 58336 final dependency list [JobId(58335), JobId(58324), JobId(58321)] = downstairs upstairs = 1
111752023-09-22T23:21:30.570ZINFOcrucible: Repair for extent 5 s:2 d:[ClientId(0)] = downstairs upstairs = 1
111762023-09-22T23:21:30.570ZINFOcrucible: RE:5 Wait for result from repair command 58322:57323 upstairs = 1
111772023-09-22T23:21:30.570ZINFOcrucible: [0] 58322 final dependency list [JobId(58321)] = downstairs upstairs = 1
111782023-09-22T23:21:30.571ZINFOcrucible: RE:5 Wait for result from NoOp command 58323:57324 upstairs = 1
111792023-09-22T23:21:30.571ZINFOcrucible: [0] 58323 final dependency list [JobId(58321), JobId(58322)] = downstairs upstairs = 1
111802023-09-22T23:21:30.574ZINFOcrucible: RE:5 Wait for result from reopen command 58324:57325 upstairs = 1
111812023-09-22T23:21:30.574ZINFOcrucible: Start extent 6 repair task = repair upstairs = 1
111822023-09-22T23:21:30.574ZINFOcrucible: RE:6 repair extent with ids 58345,58346,58347,58348 deps:[JobId(58338), JobId(58337), JobId(58314), JobId(58313), JobId(58290), JobId(58289), JobId(58266), JobId(58265), JobId(58242), JobId(58241), JobId(58218), JobId(58217)] upstairs = 1
111832023-09-22T23:21:30.574ZINFOcrucible: RE:6 close id:58345 queued, notify DS upstairs = 1
111842023-09-22T23:21:30.574ZINFOcrucible: RE:6 Wait for result from close command 58345:57346 upstairs = 1
111852023-09-22T23:21:30.574ZINFOcrucible: [0] 58345 final dependency list [] = downstairs upstairs = 1
111862023-09-22T23:21:30.574ZINFOcrucible: [0] 58348 final dependency list [JobId(58345), JobId(58346), JobId(58347)] = downstairs upstairs = 1
111872023-09-22T23:21:30.575ZINFOcrucible: [0] 58349 final dependency list [JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111882023-09-22T23:21:30.575ZINFOcrucible: [0] 58350 final dependency list [JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111892023-09-22T23:21:30.576ZINFOcrucible: [0] 58351 final dependency list [JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111902023-09-22T23:21:30.576ZINFOcrucible: [0] 58352 final dependency list [JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111912023-09-22T23:21:30.577ZINFOcrucible: [0] 58353 final dependency list [JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111922023-09-22T23:21:30.578ZINFOcrucible: [0] 58354 final dependency list [JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111932023-09-22T23:21:30.578ZINFOcrucible: [0] 58355 final dependency list [JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111942023-09-22T23:21:30.579ZINFOcrucible: [0] 58356 final dependency list [JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111952023-09-22T23:21:30.579ZINFOcrucible: [0] 58357 final dependency list [JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111962023-09-22T23:21:30.580ZINFOcrucible: [0] 58358 final dependency list [JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111972023-09-22T23:21:30.581ZINFOcrucible: [0] 58359 final dependency list [JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
111982023-09-22T23:21:30.581ZINFOcrucible: [0] 58360 final dependency list [JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
111992023-09-22T23:21:30.582ZINFOcrucible: [0] 58361 final dependency list [JobId(58348), JobId(58345)] = downstairs upstairs = 1
112002023-09-22T23:21:30.582ZWARNcrucible: Write to Extent 6:0:9 under repair upstairs = 1
112012023-09-22T23:21:30.583ZINFOcrucible: [0] 58362 final dependency list [JobId(58361), JobId(58348), JobId(58345)] = downstairs upstairs = 1
112022023-09-22T23:21:30.586ZINFOcrucible: Repair for extent 6 s:2 d:[ClientId(0)] = downstairs upstairs = 1
112032023-09-22T23:21:30.586ZINFOcrucible: RE:6 Wait for result from repair command 58346:57347 upstairs = 1
112042023-09-22T23:21:30.586ZINFOcrucible: [0] 58346 final dependency list [JobId(58345)] = downstairs upstairs = 1
112052023-09-22T23:21:30.587ZINFOcrucible: RE:6 Wait for result from NoOp command 58347:57348 upstairs = 1
112062023-09-22T23:21:30.587ZINFOcrucible: [0] 58347 final dependency list [JobId(58345), JobId(58346)] = downstairs upstairs = 1
112072023-09-22T23:21:30.590ZINFOcrucible: RE:6 Wait for result from reopen command 58348:57349 upstairs = 1
112082023-09-22T23:21:30.590ZINFOcrucible: Start extent 7 repair task = repair upstairs = 1
112092023-09-22T23:21:30.590ZINFOcrucible: RE:7 repair extent with ids 58369,58370,58371,58372 deps:[JobId(58364), JobId(58363), JobId(58340), JobId(58339), JobId(58316), JobId(58315), JobId(58292), JobId(58291), JobId(58268), JobId(58267), JobId(58244), JobId(58243), JobId(58220), JobId(58219)] upstairs = 1
112102023-09-22T23:21:30.590ZINFOcrucible: RE:7 close id:58369 queued, notify DS upstairs = 1
112112023-09-22T23:21:30.590ZINFOcrucible: RE:7 Wait for result from close command 58369:57370 upstairs = 1
112122023-09-22T23:21:30.590ZINFOcrucible: [0] 58369 final dependency list [] = downstairs upstairs = 1
112132023-09-22T23:21:30.590ZINFOcrucible: [0] 58372 final dependency list [JobId(58369), JobId(58370), JobId(58371)] = downstairs upstairs = 1
112142023-09-22T23:21:30.591ZINFOcrucible: [0] 58373 final dependency list [JobId(58350), JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
112152023-09-22T23:21:30.591ZINFOcrucible: [0] 58374 final dependency list [JobId(58373), JobId(58350), JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
112162023-09-22T23:21:30.592ZINFOcrucible: [0] 58375 final dependency list [JobId(58352), JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
112172023-09-22T23:21:30.593ZINFOcrucible: [0] 58376 final dependency list [JobId(58375), JobId(58352), JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
112182023-09-22T23:21:30.593ZINFOcrucible: [0] 58377 final dependency list [JobId(58354), JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
112192023-09-22T23:21:30.594ZINFOcrucible: [0] 58378 final dependency list [JobId(58377), JobId(58354), JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
112202023-09-22T23:21:30.595ZINFOcrucible: [0] 58379 final dependency list [JobId(58356), JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
112212023-09-22T23:21:30.595ZINFOcrucible: [0] 58380 final dependency list [JobId(58379), JobId(58356), JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
112222023-09-22T23:21:30.596ZINFOcrucible: [0] 58381 final dependency list [JobId(58358), JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
112232023-09-22T23:21:30.596ZINFOcrucible: [0] 58382 final dependency list [JobId(58381), JobId(58358), JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
112242023-09-22T23:21:30.597ZINFOcrucible: [0] 58383 final dependency list [JobId(58360), JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
112252023-09-22T23:21:30.598ZINFOcrucible: [0] 58384 final dependency list [JobId(58383), JobId(58360), JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
112262023-09-22T23:21:30.598ZINFOcrucible: [0] 58385 final dependency list [JobId(58362), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
112272023-09-22T23:21:30.599ZINFOcrucible: [0] 58386 final dependency list [JobId(58385), JobId(58362), JobId(58361), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
112282023-09-22T23:21:30.600ZINFOcrucible: [0] 58387 final dependency list [JobId(58372), JobId(58369)] = downstairs upstairs = 1
112292023-09-22T23:21:30.600ZWARNcrucible: Write to Extent 7:0:9 under repair upstairs = 1
112302023-09-22T23:21:30.600ZINFOcrucible: [0] 58388 final dependency list [JobId(58387), JobId(58372), JobId(58369)] = downstairs upstairs = 1
112312023-09-22T23:21:30.603ZINFOcrucible: Repair for extent 7 s:2 d:[ClientId(0)] = downstairs upstairs = 1
112322023-09-22T23:21:30.603ZINFOcrucible: RE:7 Wait for result from repair command 58370:57371 upstairs = 1
112332023-09-22T23:21:30.603ZINFOcrucible: [0] 58370 final dependency list [JobId(58369)] = downstairs upstairs = 1
112342023-09-22T23:21:30.603ZINFOcrucible: RE:7 Wait for result from NoOp command 58371:57372 upstairs = 1
112352023-09-22T23:21:30.603ZINFOcrucible: [0] 58371 final dependency list [JobId(58369), JobId(58370)] = downstairs upstairs = 1
112362023-09-22T23:21:30.607ZINFOcrucible: RE:7 Wait for result from reopen command 58372:57373 upstairs = 1
112372023-09-22T23:21:30.607ZINFOcrucible: Start extent 8 repair task = repair upstairs = 1
112382023-09-22T23:21:30.607ZINFOcrucible: RE:8 repair extent with ids 58393,58394,58395,58396 deps:[JobId(58390), JobId(58389), JobId(58366), JobId(58365), JobId(58342), JobId(58341), JobId(58318), JobId(58317), JobId(58294), JobId(58293), JobId(58270), JobId(58269), JobId(58246), JobId(58245), JobId(58222), JobId(58221)] upstairs = 1
112392023-09-22T23:21:30.607ZINFOcrucible: RE:8 close id:58393 queued, notify DS upstairs = 1
112402023-09-22T23:21:30.607ZINFOcrucible: RE:8 Wait for result from close command 58393:57394 upstairs = 1
112412023-09-22T23:21:30.607ZINFOcrucible: [0] 58393 final dependency list [] = downstairs upstairs = 1
112422023-09-22T23:21:30.607ZINFOcrucible: [0] 58396 final dependency list [JobId(58393), JobId(58394), JobId(58395)] = downstairs upstairs = 1
112432023-09-22T23:21:30.608ZINFOcrucible: [0] 58397 final dependency list [JobId(58374), JobId(58350), JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
112442023-09-22T23:21:30.608ZINFOcrucible: [0] 58398 final dependency list [JobId(58397), JobId(58374), JobId(58373), JobId(58350), JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
112452023-09-22T23:21:30.609ZINFOcrucible: [0] 58399 final dependency list [JobId(58376), JobId(58352), JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
112462023-09-22T23:21:30.610ZINFOcrucible: [0] 58400 final dependency list [JobId(58399), JobId(58376), JobId(58375), JobId(58352), JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
112472023-09-22T23:21:30.610ZINFOcrucible: [0] 58401 final dependency list [JobId(58378), JobId(58354), JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
112482023-09-22T23:21:30.611ZINFOcrucible: [0] 58402 final dependency list [JobId(58401), JobId(58378), JobId(58377), JobId(58354), JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
112492023-09-22T23:21:30.611ZINFOcrucible: [0] 58403 final dependency list [JobId(58380), JobId(58356), JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
112502023-09-22T23:21:30.612ZINFOcrucible: [0] 58404 final dependency list [JobId(58403), JobId(58380), JobId(58379), JobId(58356), JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
112512023-09-22T23:21:30.613ZINFOcrucible: [0] 58405 final dependency list [JobId(58382), JobId(58358), JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
112522023-09-22T23:21:30.613ZINFOcrucible: [0] 58406 final dependency list [JobId(58405), JobId(58382), JobId(58381), JobId(58358), JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
112532023-09-22T23:21:30.614ZINFOcrucible: [0] 58407 final dependency list [JobId(58384), JobId(58360), JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
112542023-09-22T23:21:30.615ZINFOcrucible: [0] 58408 final dependency list [JobId(58407), JobId(58384), JobId(58383), JobId(58360), JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
112552023-09-22T23:21:30.615ZINFOcrucible: [0] 58409 final dependency list [JobId(58386), JobId(58362), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
112562023-09-22T23:21:30.616ZINFOcrucible: [0] 58410 final dependency list [JobId(58409), JobId(58386), JobId(58385), JobId(58362), JobId(58361), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
112572023-09-22T23:21:30.616ZINFOcrucible: [0] 58411 final dependency list [JobId(58388), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
112582023-09-22T23:21:30.617ZINFOcrucible: [0] 58412 final dependency list [JobId(58411), JobId(58388), JobId(58387), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
112592023-09-22T23:21:30.618ZINFOcrucible: [0] 58413 final dependency list [JobId(58396), JobId(58393)] = downstairs upstairs = 1
112602023-09-22T23:21:30.618ZWARNcrucible: Write to Extent 8:0:9 under repair upstairs = 1
112612023-09-22T23:21:30.618ZINFOcrucible: [0] 58414 final dependency list [JobId(58413), JobId(58396), JobId(58393)] = downstairs upstairs = 1
112622023-09-22T23:21:30.620ZINFOcrucible: Repair for extent 8 s:2 d:[ClientId(0)] = downstairs upstairs = 1
112632023-09-22T23:21:30.620ZINFOcrucible: RE:8 Wait for result from repair command 58394:57395 upstairs = 1
112642023-09-22T23:21:30.620ZINFOcrucible: [0] 58394 final dependency list [JobId(58393)] = downstairs upstairs = 1
112652023-09-22T23:21:30.621ZINFOcrucible: RE:8 Wait for result from NoOp command 58395:57396 upstairs = 1
112662023-09-22T23:21:30.621ZINFOcrucible: [0] 58395 final dependency list [JobId(58393), JobId(58394)] = downstairs upstairs = 1
112672023-09-22T23:21:30.624ZINFOcrucible: RE:8 Wait for result from reopen command 58396:57397 upstairs = 1
112682023-09-22T23:21:30.624ZINFOcrucible: Start extent 9 repair task = repair upstairs = 1
112692023-09-22T23:21:30.624ZINFOcrucible: RE:9 repair extent with ids 58417,58418,58419,58420 deps:[JobId(58416), JobId(58415), JobId(58392), JobId(58391), JobId(58368), JobId(58367), JobId(58344), JobId(58343), JobId(58320), JobId(58319), JobId(58296), JobId(58295), JobId(58272), JobId(58271), JobId(58248), JobId(58247), JobId(58224), JobId(58223)] upstairs = 1
112702023-09-22T23:21:30.624ZINFOcrucible: RE:9 close id:58417 queued, notify DS upstairs = 1
112712023-09-22T23:21:30.624ZINFOcrucible: RE:9 Wait for result from close command 58417:57418 upstairs = 1
112722023-09-22T23:21:30.624ZINFOcrucible: [0] 58417 final dependency list [] = downstairs upstairs = 1
112732023-09-22T23:21:30.624ZINFOcrucible: [0] 58420 final dependency list [JobId(58417), JobId(58418), JobId(58419)] = downstairs upstairs = 1
112742023-09-22T23:21:30.625ZINFOcrucible: [0] 58421 final dependency list [JobId(58398), JobId(58374), JobId(58350), JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
112752023-09-22T23:21:30.626ZINFOcrucible: [0] 58422 final dependency list [JobId(58421), JobId(58398), JobId(58397), JobId(58374), JobId(58373), JobId(58350), JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
112762023-09-22T23:21:30.626ZINFOcrucible: [0] 58423 final dependency list [JobId(58400), JobId(58376), JobId(58352), JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
112772023-09-22T23:21:30.627ZINFOcrucible: [0] 58424 final dependency list [JobId(58423), JobId(58400), JobId(58399), JobId(58376), JobId(58375), JobId(58352), JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
112782023-09-22T23:21:30.627ZINFOcrucible: [0] 58425 final dependency list [JobId(58402), JobId(58378), JobId(58354), JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
112792023-09-22T23:21:30.628ZINFOcrucible: [0] 58426 final dependency list [JobId(58425), JobId(58402), JobId(58401), JobId(58378), JobId(58377), JobId(58354), JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
112802023-09-22T23:21:30.629ZINFOcrucible: [0] 58427 final dependency list [JobId(58404), JobId(58380), JobId(58356), JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
112812023-09-22T23:21:30.629ZINFOcrucible: [0] 58428 final dependency list [JobId(58427), JobId(58404), JobId(58403), JobId(58380), JobId(58379), JobId(58356), JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
112822023-09-22T23:21:30.630ZINFOcrucible: [0] 58429 final dependency list [JobId(58406), JobId(58382), JobId(58358), JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
112832023-09-22T23:21:30.631ZINFOcrucible: [0] 58430 final dependency list [JobId(58429), JobId(58406), JobId(58405), JobId(58382), JobId(58381), JobId(58358), JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
112842023-09-22T23:21:30.631ZINFOcrucible: [0] 58431 final dependency list [JobId(58408), JobId(58384), JobId(58360), JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
112852023-09-22T23:21:30.632ZINFOcrucible: [0] 58432 final dependency list [JobId(58431), JobId(58408), JobId(58407), JobId(58384), JobId(58383), JobId(58360), JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
112862023-09-22T23:21:30.633ZINFOcrucible: [0] 58433 final dependency list [JobId(58410), JobId(58386), JobId(58362), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
112872023-09-22T23:21:30.633ZINFOcrucible: [0] 58434 final dependency list [JobId(58433), JobId(58410), JobId(58409), JobId(58386), JobId(58385), JobId(58362), JobId(58361), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
112882023-09-22T23:21:30.634ZINFOcrucible: [0] 58435 final dependency list [JobId(58412), JobId(58388), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
112892023-09-22T23:21:30.634ZINFOcrucible: [0] 58436 final dependency list [JobId(58435), JobId(58412), JobId(58411), JobId(58388), JobId(58387), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
112902023-09-22T23:21:30.635ZINFOcrucible: [0] 58437 final dependency list [JobId(58414), JobId(58396), JobId(58395), JobId(58394), JobId(58393)] = downstairs upstairs = 1
112912023-09-22T23:21:30.636ZINFOcrucible: [0] 58438 final dependency list [JobId(58437), JobId(58414), JobId(58413), JobId(58396), JobId(58395), JobId(58394), JobId(58393)] = downstairs upstairs = 1
112922023-09-22T23:21:30.636ZINFOcrucible: [0] 58439 final dependency list [JobId(58420), JobId(58417)] = downstairs upstairs = 1
112932023-09-22T23:21:30.637ZWARNcrucible: Write to Extent 9:0:9 under repair upstairs = 1
112942023-09-22T23:21:30.637ZINFOcrucible: [0] 58440 final dependency list [JobId(58439), JobId(58420), JobId(58417)] = downstairs upstairs = 1
112952023-09-22T23:21:30.638ZINFOcrucible: Repair for extent 9 s:2 d:[ClientId(0)] = downstairs upstairs = 1
112962023-09-22T23:21:30.638ZINFOcrucible: RE:9 Wait for result from repair command 58418:57419 upstairs = 1
112972023-09-22T23:21:30.638ZINFOcrucible: [0] 58418 final dependency list [JobId(58417)] = downstairs upstairs = 1
112982023-09-22T23:21:30.638ZINFOcrucible: RE:9 Wait for result from NoOp command 58419:57420 upstairs = 1
112992023-09-22T23:21:30.638ZINFOcrucible: [0] 58419 final dependency list [JobId(58417), JobId(58418)] = downstairs upstairs = 1
113002023-09-22T23:21:30.642ZINFOcrucible: RE:9 Wait for result from reopen command 58420:57421 upstairs = 1
113012023-09-22T23:21:30.642ZINFOcrucible: LiveRepair final flush submitted upstairs = 1
113022023-09-22T23:21:30.642ZINFOcrucible: [0] 58441 final dependency list [JobId(58440), JobId(58439), JobId(58438), JobId(58437), JobId(58436), JobId(58435), JobId(58434), JobId(58433), JobId(58432), JobId(58431), JobId(58430), JobId(58429), JobId(58428), JobId(58427), JobId(58426), JobId(58425), JobId(58424), JobId(58423), JobId(58422), JobId(58421), JobId(58420), JobId(58419), JobId(58418), JobId(58417), JobId(58414), JobId(58413), JobId(58412), JobId(58411), JobId(58410), JobId(58409), JobId(58408), JobId(58407), JobId(58406), JobId(58405), JobId(58404), JobId(58403), JobId(58402), JobId(58401), JobId(58400), JobId(58399), JobId(58398), JobId(58397), JobId(58396), JobId(58395), JobId(58394), JobId(58393), JobId(58388), JobId(58387), JobId(58386), JobId(58385), JobId(58384), JobId(58383), JobId(58382), JobId(58381), JobId(58380), JobId(58379), JobId(58378), JobId(58377), JobId(58376), JobId(58375), JobId(58374), JobId(58373), JobId(58372), JobId(58371), JobId(58370), JobId(58369), JobId(58362), JobId(58361), JobId(58360), JobId(58359), JobId(58358), JobId(58357), JobId(58356), JobId(58355), JobId(58354), JobId(58353), JobId(58352), JobId(58351), JobId(58350), JobId(58349), JobId(58348), JobId(58347), JobId(58346), JobId(58345), JobId(58336), JobId(58335), JobId(58334), JobId(58333), JobId(58332), JobId(58331), JobId(58330), JobId(58329), JobId(58328), JobId(58327), JobId(58326), JobId(58325), JobId(58324), JobId(58323), JobId(58322), JobId(58321), JobId(58310), JobId(58309), JobId(58308), JobId(58307), JobId(58306), JobId(58305), JobId(58304), JobId(58303), JobId(58302), JobId(58301), JobId(58300), JobId(58299), JobId(58298), JobId(58297), JobId(58284), JobId(58283), JobId(58282), JobId(58281), JobId(58280), JobId(58279), JobId(58278), JobId(58277), JobId(58276), JobId(58275), JobId(58274), JobId(58273), JobId(58258), JobId(58257), JobId(58256), JobId(58255), JobId(58254), JobId(58253), JobId(58252), JobId(58251), JobId(58250), JobId(58249), JobId(58232), 
JobId(58231), JobId(58230), JobId(58229), JobId(58228), JobId(58227), JobId(58226), JobId(58225), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
113032023-09-22T23:21:30.643ZINFOcrucible: LiveRepair final flush completed upstairs = 1
113042023-09-22T23:21:30.643ZINFOcrucible: [0] 85682a66-27fd-403c-bf22-d04b4ff66805 (5d828379-bf79-455f-9270-e82849f9ecba) LiveRepair Active Active ds_transition to Active upstairs = 1
113052023-09-22T23:21:30.643ZINFOcrucible: [0] Transition from LiveRepair to Active upstairs = 1
113062023-09-22T23:21:30.643ZWARNcrucible: Live Repair returns Ok(()) upstairs = 1
11307 test dummy_downstairs_tests::protocol_test::test_successful_live_repair ... ok
113082023-09-22T23:21:31.926ZINFOcrucible: sent read response for job 0 = 1000
113092023-09-22T23:21:31.926ZINFOcrucible: sent read response for job 1 = 1001
113102023-09-22T23:21:31.926ZINFOcrucible: responded to ping downstairs = 1
113112023-09-22T23:21:31.926ZERROcrucible: [0] Dropping job 1000, this downstairs is faulted upstairs = 1
113122023-09-22T23:21:31.926ZERROcrucible: process_ds_operation error: No longer active upstairs = 1
113132023-09-22T23:21:31.926ZWARNcrucible: [0] will exit pm_task, this downstairs Faulted upstairs = 1
113142023-09-22T23:21:31.927ZERROcrucible: 127.0.0.1:36984: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Faulted)), so we end too looper = 0 upstairs = 1
113152023-09-22T23:21:31.927ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa Gone missing, transition from Faulted to Faulted upstairs = 1
113162023-09-22T23:21:31.927ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa connection to 127.0.0.1:36984 closed looper = 0 upstairs = 1
113172023-09-22T23:21:31.927ZINFOcrucible: [0] 127.0.0.1:36984 task reports connection:false upstairs = 1
113182023-09-22T23:21:31.927ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa Faulted Active Active upstairs = 1
113192023-09-22T23:21:31.927ZINFOcrucible: [0] 127.0.0.1:36984 task reports offline upstairs = 1
113202023-09-22T23:21:31.927ZERROcrucible: spawn_message_receiver saw disconnect, bailing downstairs = 1
113212023-09-22T23:21:31.927ZINFOcrucible: sent read response for job 2 = 1002
113222023-09-22T23:21:31.927ZERROcrucible: could not send read response for job 3 = 1003: Broken pipe (os error 32)
113232023-09-22T23:21:32.672ZINFOcrucible: responded to ping downstairs = 2
113242023-09-22T23:21:32.672ZINFOcrucible: responded to ping downstairs = 3
113252023-09-22T23:21:32.927ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa looper connected looper = 0 upstairs = 1
113262023-09-22T23:21:32.927ZINFOcrucible: [0] Proc runs for 127.0.0.1:36984 in state Faulted upstairs = 1
113272023-09-22T23:21:32.927ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
113282023-09-22T23:21:32.928ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session 7f1ba66d-4c7a-4444-a654-4b5bfde1244a upstairs = 1
113292023-09-22T23:21:32.928ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1 } downstairs = 1
113302023-09-22T23:21:32.928ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
113312023-09-22T23:21:32.928ZINFOcrucible: [0] downstairs client at 127.0.0.1:36984 has UUID 8dfe8526-b15f-436d-8c37-dad574df0099 upstairs = 1
113322023-09-22T23:21:32.928ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 8dfe8526-b15f-436d-8c37-dad574df0099, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
113332023-09-22T23:21:32.928ZINFOcrucible: Returning client:0 UUID:8dfe8526-b15f-436d-8c37-dad574df0099 matches upstairs = 1
113342023-09-22T23:21:32.928ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa Faulted Active Active upstairs = 1
113352023-09-22T23:21:32.928ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
113362023-09-22T23:21:32.928ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) Faulted Active Active ds_transition to LiveRepairReady upstairs = 1
113372023-09-22T23:21:32.928ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady upstairs = 1
113382023-09-22T23:21:32.928ZWARNcrucible: [0] new RM replaced this: Some(RegionMetadata { generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dirty: [false, false, false, false, false, false, false, false, false, false] }) upstairs = 1
113392023-09-22T23:21:32.928ZWARNcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa Enter Ready for LiveRepair mode upstairs = 1
113402023-09-22T23:21:32.928ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
113412023-09-22T23:21:32.928ZINFOcrucible: [0] 127.0.0.1:36984 task reports connection:true upstairs = 1
113422023-09-22T23:21:32.928ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa LiveRepairReady Active Active upstairs = 1
113432023-09-22T23:21:32.928ZINFOcrucible: Set check for repair upstairs = 1
113442023-09-22T23:21:33.930ZINFOcrucible: Checking if live repair is needed upstairs = 1
113452023-09-22T23:21:33.930ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) LiveRepairReady Active Active ds_transition to LiveRepair upstairs = 1
113462023-09-22T23:21:33.930ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair upstairs = 1
113472023-09-22T23:21:33.930ZINFOcrucible: Live Repair started upstairs = 1
113482023-09-22T23:21:33.930ZWARNcrucible: Live Repair main task begins. task = repair upstairs = 1
113492023-09-22T23:21:33.930ZINFOcrucible: Start Live Repair of extents 0 to 10 task = repair upstairs = 1
113502023-09-22T23:21:33.930ZINFOcrucible: Start extent 0 repair task = repair upstairs = 1
113512023-09-22T23:21:33.930ZINFOcrucible: RE:0 repair extent with ids 58201,58202,58203,58204 deps:[] upstairs = 1
113522023-09-22T23:21:33.930ZINFOcrucible: RE:0 close id:58201 queued, notify DS upstairs = 1
113532023-09-22T23:21:33.930ZINFOcrucible: RE:0 Wait for result from close command 58201:57202 upstairs = 1
113542023-09-22T23:21:33.930ZINFOcrucible: [0] 58201 final dependency list [] = downstairs upstairs = 1
113552023-09-22T23:21:33.930ZINFOcrucible: [0] 58204 final dependency list [JobId(58201), JobId(58202), JobId(58203)] = downstairs upstairs = 1
113562023-09-22T23:21:33.931ZINFOcrucible: Repair for extent 0 s:2 d:[ClientId(0)] = downstairs upstairs = 1
113572023-09-22T23:21:33.931ZINFOcrucible: RE:0 Wait for result from repair command 58202:57203 upstairs = 1
113582023-09-22T23:21:33.931ZINFOcrucible: [0] 58202 final dependency list [JobId(58201)] = downstairs upstairs = 1
113592023-09-22T23:21:33.931ZERROcrucible: dropping ds1 now!
113602023-09-22T23:21:33.931ZERROcrucible: reconnecting ds1 now!
113612023-09-22T23:21:33.931ZERROcrucible: [0] job id 58202 saw error GenericError("bad news, networks are tricky") upstairs = 1
113622023-09-22T23:21:33.932ZERROcrucible: [0] DS Reports error Err(GenericError("bad news, networks are tricky")) on job 58202, DownstairsIO { ds_id: JobId(58202), guest_id: 57203, work: ExtentLiveRepair { dependencies: [JobId(58201)], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:45462, repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 9 }) } = downstairs upstairs = 1
113632023-09-22T23:21:33.932ZERROcrucible: [0] Reports error GenericError("bad news, networks are tricky") on job 58202, DownstairsIO { ds_id: JobId(58202), guest_id: 57203, work: ExtentLiveRepair { dependencies: [JobId(58201)], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:45462, repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 9 }) } = downstairs upstairs = 1
113642023-09-22T23:21:33.932ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs upstairs = 1
113652023-09-22T23:21:33.932ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs upstairs = 1
113662023-09-22T23:21:33.932ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) LiveRepair Active Active ds_transition to Faulted upstairs = 1
113672023-09-22T23:21:33.932ZINFOcrucible: [0] Transition from LiveRepair to Faulted upstairs = 1
113682023-09-22T23:21:33.932ZWARNcrucible: [0] will exit pm_task, this downstairs Faulted upstairs = 1
113692023-09-22T23:21:33.932ZERROcrucible: 127.0.0.1:36984: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Faulted)), so we end too looper = 0 upstairs = 1
113702023-09-22T23:21:33.932ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa Gone missing, transition from Faulted to Faulted upstairs = 1
113712023-09-22T23:21:33.932ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa connection to 127.0.0.1:36984 closed looper = 0 upstairs = 1
113722023-09-22T23:21:33.932ZINFOcrucible: [0] 127.0.0.1:36984 task reports connection:false upstairs = 1
113732023-09-22T23:21:33.932ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa Faulted Active Active upstairs = 1
113742023-09-22T23:21:33.932ZINFOcrucible: [0] 127.0.0.1:36984 task reports offline upstairs = 1
113752023-09-22T23:21:33.932ZERROcrucible: Extent 0 close id:58202 Failed: IO Error: 1 out of 3 downstairs failed to complete this IO upstairs = 1
113762023-09-22T23:21:33.932ZINFOcrucible: RE:0 Wait for result from NoOp command 58203:57204 upstairs = 1
113772023-09-22T23:21:33.932ZERROcrucible: spawn_message_receiver saw disconnect, bailing downstairs = 1
113782023-09-22T23:21:34.933ZERROcrucible: ds1 negotiate start now!
113792023-09-22T23:21:34.933ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa looper connected looper = 0 upstairs = 1
113802023-09-22T23:21:34.933ZINFOcrucible: [0] Proc runs for 127.0.0.1:36984 in state Faulted upstairs = 1
113812023-09-22T23:21:34.933ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
113822023-09-22T23:21:34.933ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session 7f1ba66d-4c7a-4444-a654-4b5bfde1244a upstairs = 1
113832023-09-22T23:21:34.933ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 28a12560-c07c-41d3-8d35-7e06b0c337aa, session_id: 7f1ba66d-4c7a-4444-a654-4b5bfde1244a, gen: 1 } downstairs = 1
113842023-09-22T23:21:34.933ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
113852023-09-22T23:21:34.933ZERROcrucible: ds1 negotiate extent versions please now!
113862023-09-22T23:21:34.934ZINFOcrucible: [0] downstairs client at 127.0.0.1:36984 has UUID 8dfe8526-b15f-436d-8c37-dad574df0099 upstairs = 1
113872023-09-22T23:21:34.934ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 8dfe8526-b15f-436d-8c37-dad574df0099, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
113882023-09-22T23:21:34.934ZINFOcrucible: Returning client:0 UUID:8dfe8526-b15f-436d-8c37-dad574df0099 matches upstairs = 1
113892023-09-22T23:21:34.934ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa Faulted Active Active upstairs = 1
113902023-09-22T23:21:34.934ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
113912023-09-22T23:21:34.934ZERROcrucible: ds1 spawn message receiver now!
113922023-09-22T23:21:34.934ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) Faulted Active Active ds_transition to LiveRepairReady upstairs = 1
113932023-09-22T23:21:34.934ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady upstairs = 1
113942023-09-22T23:21:34.934ZWARNcrucible: [0] new RM replaced this: Some(RegionMetadata { generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dirty: [false, false, false, false, false, false, false, false, false, false] }) upstairs = 1
113952023-09-22T23:21:34.934ZWARNcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa Enter Ready for LiveRepair mode upstairs = 1
113962023-09-22T23:21:34.934ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
113972023-09-22T23:21:34.934ZINFOcrucible: [0] 127.0.0.1:36984 task reports connection:true upstairs = 1
113982023-09-22T23:21:34.934ZINFOcrucible: 28a12560-c07c-41d3-8d35-7e06b0c337aa LiveRepairReady Active Active upstairs = 1
113992023-09-22T23:21:34.934ZINFOcrucible: Set check for repair upstairs = 1
114002023-09-22T23:21:34.934ZINFOcrucible: RE:0 Wait for result from reopen command 58204:57205 upstairs = 1
114012023-09-22T23:21:34.934ZWARNcrucible: RE:0 Bailing with error upstairs = 1
114022023-09-22T23:21:34.934ZWARNcrucible: Error After extent 0 repair task = repair upstairs = 1
114032023-09-22T23:21:34.934ZINFOcrucible: Start extent 1 repair task = repair upstairs = 1
114042023-09-22T23:21:34.934ZINFOcrucible: extent 1 repair has failed task = repair upstairs = 1
114052023-09-22T23:21:34.934ZWARNcrucible: Exit repair at extent 1 task = repair upstairs = 1
114062023-09-22T23:21:34.935ZINFOcrucible: LiveRepair final flush submitted upstairs = 1
114072023-09-22T23:21:34.936ZINFOcrucible: LiveRepair final flush completed upstairs = 1
114082023-09-22T23:21:34.936ZWARNcrucible: Live Repair returns Ok(()) upstairs = 1
114092023-09-22T23:21:35.935ZINFOcrucible: Checking if live repair is needed upstairs = 1
114102023-09-22T23:21:35.935ZINFOcrucible: [0] 28a12560-c07c-41d3-8d35-7e06b0c337aa (7f1ba66d-4c7a-4444-a654-4b5bfde1244a) LiveRepairReady Active Active ds_transition to LiveRepair upstairs = 1
114112023-09-22T23:21:35.935ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair upstairs = 1
114122023-09-22T23:21:35.935ZINFOcrucible: Live Repair started upstairs = 1
114132023-09-22T23:21:35.935ZWARNcrucible: Live Repair main task begins. task = repair upstairs = 1
114142023-09-22T23:21:35.935ZINFOcrucible: Start Live Repair of extents 0 to 10 task = repair upstairs = 1
114152023-09-22T23:21:35.935ZINFOcrucible: Start extent 0 repair task = repair upstairs = 1
114162023-09-22T23:21:35.935ZINFOcrucible: RE:0 repair extent with ids 58206,58207,58208,58209 deps:[] upstairs = 1
114172023-09-22T23:21:35.935ZINFOcrucible: RE:0 close id:58206 queued, notify DS upstairs = 1
114182023-09-22T23:21:35.935ZINFOcrucible: RE:0 Wait for result from close command 58206:57207 upstairs = 1
114192023-09-22T23:21:35.935ZINFOcrucible: [0] 58206 final dependency list [] = downstairs upstairs = 1
114202023-09-22T23:21:35.935ZINFOcrucible: [0] 58209 final dependency list [JobId(58206), JobId(58207), JobId(58208)] = downstairs upstairs = 1
11421 test dummy_downstairs_tests::protocol_test::test_error_during_live_repair_no_halt ... ok
11422 
11423 test result: ok. 351 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 397.39s
11424 
11425 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_agent-acbf118b39db417b --nocapture`
11426 
11427 running 11 tests
11428 {"id":"abc","state":"requested","block_size":4096,"extent_size":4096,"extent_count":100,"encrypted":false,"port_number":1701,"cert_pem":null,"key_pem":null,"root_pem":null}
11429 Sep 22 23:21:35.992 INFO region f7100c32-c286-432a-9d07-f9e1b092608d state: Requested, component: test_harness_datafile
11430 Sep 22 23:21:35.992 INFO region 064d55d6-7fae-45ce-a14e-8a514525e144 state: Requested, component: test_harness_datafile
11431 Sep 22 23:21:35.992 INFO region d7a15ef3-a107-4974-8fb9-03e5d5353ced state: Requested, component: test_harness_datafile
11432 Sep 22 23:21:35.992 INFO region ea20e721-32d0-4fd5-938b-1abf7dd2728d state: Requested, component: test_harness_datafile
11433 Sep 22 23:21:35.992 INFO region fb7a6512-7a38-4a4d-9682-5d11d0d740e8 state: Requested, component: test_harness_datafile
11434 Sep 22 23:21:35.995 INFO region f7100c32-c286-432a-9d07-f9e1b092608d state: Requested -> Created, component: test_harness_datafile
11435 Sep 22 23:21:35.995 INFO region ea20e721-32d0-4fd5-938b-1abf7dd2728d state: Requested -> Created, component: test_harness_datafile
11436 Sep 22 23:21:35.995 INFO region fb7a6512-7a38-4a4d-9682-5d11d0d740e8 state: Requested -> Tombstoned, component: test_harness_datafile
11437 test model::test::basic ... ok
11438 test test::test_collect_behaviour ... ok
11439 Sep 22 23:21:35.995 INFO region 064d55d6-7fae-45ce-a14e-8a514525e144 state: Requested -> Tombstoned, component: test_harness_datafile
11440 Sep 22 23:21:35.995 INFO region d7a15ef3-a107-4974-8fb9-03e5d5353ced state: Requested -> Created, component: test_harness_datafile
11441 Sep 22 23:21:35.996 INFO region dcfce67e-c4ff-48cd-9963-6b8373ecd4b6 state: Requested, component: test_harness_datafile
11442 Sep 22 23:21:35.996 INFO creating missing downstairs instance downstairs-d7a15ef3-a107-4974-8fb9-03e5d5353ced
11443 Sep 22 23:21:35.996 INFO creating missing downstairs instance downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11444 Sep 22 23:21:35.996 INFO creating missing downstairs instance downstairs-ea20e721-32d0-4fd5-938b-1abf7dd2728d
11445 Sep 22 23:21:35.996 INFO region dcfce67e-c4ff-48cd-9963-6b8373ecd4b6 state: Requested -> Failed, component: test_harness_datafile
11446 Sep 22 23:21:35.996 INFO region 5a285992-c4f9-4c6c-8322-87d73bb5c18f state: Requested, component: test_harness_datafile
11447 Sep 22 23:21:35.996 INFO ok, have oxide/crucible/downstairs:downstairs-ea20e721-32d0-4fd5-938b-1abf7dd2728d
11448 Sep 22 23:21:35.996 INFO region 5a285992-c4f9-4c6c-8322-87d73bb5c18f state: Requested -> Created, component: test_harness_datafile
11449 test datafile::test::test_stat_parsing ... ok
11450 Sep 22 23:21:35.996 INFO ok, have oxide/crucible/downstairs:downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11451 Sep 22 23:21:35.996 INFO ok, have oxide/crucible/downstairs:downstairs-d7a15ef3-a107-4974-8fb9-03e5d5353ced
11452 Sep 22 23:21:35.996 INFO reconfig required, no property group
11453 Sep 22 23:21:35.996 INFO reconfig required, no property group
11454 Sep 22 23:21:35.996 INFO reconfig required, no property group
11455 test test::test_smf_datafile_race_region ... ok
11456 test test::test_smf_region_failed ... ok
11457 Sep 22 23:21:35.996 INFO creating config property group
11458 Sep 22 23:21:35.996 INFO creating config property group
11459 Sep 22 23:21:35.996 INFO creating config property group
11460 test test::test_smf_region_create_then_destroy ... ok
11461 Sep 22 23:21:35.996 INFO reconfiguring oxide/crucible/downstairs:downstairs-d7a15ef3-a107-4974-8fb9-03e5d5353ced
11462 Sep 22 23:21:35.996 INFO reconfiguring oxide/crucible/downstairs:downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11463 Sep 22 23:21:35.996 INFO reconfiguring oxide/crucible/downstairs:downstairs-ea20e721-32d0-4fd5-938b-1abf7dd2728d
11464 Sep 22 23:21:35.996 INFO creating missing downstairs instance downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11465 Sep 22 23:21:35.996 INFO ok, have oxide/crucible/downstairs:downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11466 Sep 22 23:21:35.996 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpykMspN/regions/f7100c32-c286-432a-9d07-f9e1b092608d
11467 Sep 22 23:21:35.996 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpk5bUdb/regions/d7a15ef3-a107-4974-8fb9-03e5d5353ced
11468 Sep 22 23:21:35.996 INFO reconfig required, no property group
11469 Sep 22 23:21:35.996 INFO creating config property group
11470 Sep 22 23:21:35.996 INFO ensure port SCF_TYPE_COUNT 1000
11471 Sep 22 23:21:35.996 INFO ensure port SCF_TYPE_COUNT 1000
11472 Sep 22 23:21:35.996 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp5GQNwb/regions/ea20e721-32d0-4fd5-938b-1abf7dd2728d
11473 Sep 22 23:21:35.996 INFO reconfiguring oxide/crucible/downstairs:downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11474 Sep 22 23:21:35.996 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11475 Sep 22 23:21:35.996 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11476 Sep 22 23:21:35.996 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp1EK8D2/regions/5a285992-c4f9-4c6c-8322-87d73bb5c18f
11477 Sep 22 23:21:35.996 INFO commit
11478 Sep 22 23:21:35.996 INFO ensure port SCF_TYPE_COUNT 1000
11479 Sep 22 23:21:35.996 INFO commit
11480 Sep 22 23:21:35.996 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11481 Sep 22 23:21:35.996 INFO ok!
11482 Sep 22 23:21:35.996 INFO commit
11483 Sep 22 23:21:35.996 INFO ensure port SCF_TYPE_COUNT 1000
11484 Sep 22 23:21:35.996 INFO ok!
11485 Sep 22 23:21:35.996 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11486 Sep 22 23:21:35.996 INFO ok!
11487 Sep 22 23:21:35.996 INFO commit
11488 Sep 22 23:21:35.996 INFO region d7a15ef3-a107-4974-8fb9-03e5d5353ced state: Created -> Tombstoned, component: test_harness_datafile
11489 Sep 22 23:21:35.996 INFO ok!
11490 Sep 22 23:21:35.997 INFO creating missing downstairs instance downstairs-ea20e721-32d0-4fd5-938b-1abf7dd2728d
11491 Sep 22 23:21:35.997 INFO ok, have oxide/crucible/downstairs:downstairs-ea20e721-32d0-4fd5-938b-1abf7dd2728d
11492 Sep 22 23:21:35.997 INFO creating region 5a285992-c4f9-4c6c-8322-87d73bb5c18f snapshot 789d4889-da87-4e0a-b406-52cd4a361df2, component: test_snapshot_interface
11493 Sep 22 23:21:35.997 INFO creating region f7100c32-c286-432a-9d07-f9e1b092608d snapshot a285abac-db97-457e-987b-06a4be752bd5, component: test_snapshot_interface
11494 Sep 22 23:21:35.997 INFO reconfig required, no property group
11495 Sep 22 23:21:35.997 INFO creating config property group
11496 Sep 22 23:21:35.997 INFO reconfiguring oxide/crucible/downstairs:downstairs-ea20e721-32d0-4fd5-938b-1abf7dd2728d
11497 Sep 22 23:21:35.997 INFO creating region 5a285992-c4f9-4c6c-8322-87d73bb5c18f snapshot 789d4889-da87-4e0a-b406-52cd4a361df2 dir "/tmp/.tmp1EK8D2/regions/5a285992-c4f9-4c6c-8322-87d73bb5c18f/.zfs/snapshot/789d4889-da87-4e0a-b406-52cd4a361df2", component: test_snapshot_interface
11498 Sep 22 23:21:35.997 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp5GQNwb/regions/ea20e721-32d0-4fd5-938b-1abf7dd2728d
11499 Sep 22 23:21:35.997 INFO creating region f7100c32-c286-432a-9d07-f9e1b092608d snapshot a285abac-db97-457e-987b-06a4be752bd5 dir "/tmp/.tmpykMspN/regions/f7100c32-c286-432a-9d07-f9e1b092608d/.zfs/snapshot/a285abac-db97-457e-987b-06a4be752bd5", component: test_snapshot_interface
11500 Sep 22 23:21:35.997 INFO disabling downstairs instance: downstairs-d7a15ef3-a107-4974-8fb9-03e5d5353ced (instance states: (Some(Online), None))
11501 Sep 22 23:21:35.997 INFO ensure port SCF_TYPE_COUNT 1000
11502 Sep 22 23:21:35.997 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11503 Sep 22 23:21:35.997 INFO commit
11504 Sep 22 23:21:35.997 INFO ok!
11505 Sep 22 23:21:35.997 INFO requesting running snapshot 5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2 state: Requested, component: test_harness_datafile
11506 Sep 22 23:21:35.997 INFO requesting running snapshot f7100c32-c286-432a-9d07-f9e1b092608d-a285abac-db97-457e-987b-06a4be752bd5 state: Requested, component: test_harness_datafile
11507 test test::test_smf_region ... ok
11508 Sep 22 23:21:35.997 DEBG found expected downstairs instance: downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11509 Sep 22 23:21:35.997 DEBG found expected downstairs instance: downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11510 test test::test_smf_region_bounce_idempotent ... ok
11511 Sep 22 23:21:35.997 DEBG do not need to reconfigure oxide/crucible/downstairs:downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11512 Sep 22 23:21:35.997 DEBG do not need to reconfigure oxide/crucible/downstairs:downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11513 Sep 22 23:21:35.997 INFO creating missing snapshot instance snapshot-5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2
11514 Sep 22 23:21:35.997 INFO creating missing snapshot instance snapshot-f7100c32-c286-432a-9d07-f9e1b092608d-a285abac-db97-457e-987b-06a4be752bd5
11515 Sep 22 23:21:35.997 INFO ok, have oxide/crucible/downstairs:snapshot-5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2
11516 Sep 22 23:21:35.997 INFO ok, have oxide/crucible/downstairs:snapshot-f7100c32-c286-432a-9d07-f9e1b092608d-a285abac-db97-457e-987b-06a4be752bd5
11517 Sep 22 23:21:35.997 INFO reconfig required, no property group
11518 Sep 22 23:21:35.997 INFO reconfig required, no property group
11519 Sep 22 23:21:35.997 INFO reconfiguring oxide/crucible/downstairs:snapshot-5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2
11520 Sep 22 23:21:35.997 INFO reconfiguring oxide/crucible/downstairs:snapshot-f7100c32-c286-432a-9d07-f9e1b092608d-a285abac-db97-457e-987b-06a4be752bd5
11521 Sep 22 23:21:35.997 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp1EK8D2/regions/5a285992-c4f9-4c6c-8322-87d73bb5c18f/.zfs/snapshot/789d4889-da87-4e0a-b406-52cd4a361df2
11522 Sep 22 23:21:35.997 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpykMspN/regions/f7100c32-c286-432a-9d07-f9e1b092608d/.zfs/snapshot/a285abac-db97-457e-987b-06a4be752bd5
11523 Sep 22 23:21:35.997 INFO ensure port SCF_TYPE_COUNT 1001
11524 Sep 22 23:21:35.997 INFO ensure port SCF_TYPE_COUNT 1001
11525 Sep 22 23:21:35.997 INFO ensure mode SCF_TYPE_ASTRING ro
11526 Sep 22 23:21:35.997 INFO ensure mode SCF_TYPE_ASTRING ro
11527 Sep 22 23:21:35.997 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11528 Sep 22 23:21:35.997 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11529 Sep 22 23:21:35.997 INFO commit
11530 Sep 22 23:21:35.997 INFO commit
11531 Sep 22 23:21:35.997 INFO ok!
11532 Sep 22 23:21:35.997 INFO ok!
11533 Sep 22 23:21:35.997 INFO region 5a285992-c4f9-4c6c-8322-87d73bb5c18f running snapshot 789d4889-da87-4e0a-b406-52cd4a361df2 state: Requested -> Created, component: test_harness_datafile
11534 Sep 22 23:21:35.997 INFO removing running snapshot f7100c32-c286-432a-9d07-f9e1b092608d-a285abac-db97-457e-987b-06a4be752bd5, component: test_harness_datafile
11535 Sep 22 23:21:35.998 INFO creating missing downstairs instance downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11536 Sep 22 23:21:35.998 INFO ok, have oxide/crucible/downstairs:downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11537 Sep 22 23:21:35.998 DEBG found expected downstairs instance: downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11538 Sep 22 23:21:35.998 INFO disabling snapshot instance: snapshot-f7100c32-c286-432a-9d07-f9e1b092608d-a285abac-db97-457e-987b-06a4be752bd5 (instance states: (Some(Online), None))
11539 Sep 22 23:21:35.998 INFO reconfig required, no property group
11540 Sep 22 23:21:35.998 INFO creating config property group
11541 Sep 22 23:21:35.998 INFO reconfiguring oxide/crucible/downstairs:downstairs-5a285992-c4f9-4c6c-8322-87d73bb5c18f
11542 Sep 22 23:21:35.998 DEBG do not need to reconfigure oxide/crucible/downstairs:downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11543 Sep 22 23:21:35.998 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp1EK8D2/regions/5a285992-c4f9-4c6c-8322-87d73bb5c18f
11544 Sep 22 23:21:35.998 INFO ensure port SCF_TYPE_COUNT 1000
11545 Sep 22 23:21:35.998 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11546 Sep 22 23:21:35.998 INFO commit
11547 Sep 22 23:21:35.998 INFO creating missing downstairs instance downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11548 Sep 22 23:21:35.998 INFO ok, have oxide/crucible/downstairs:downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11549 Sep 22 23:21:35.998 INFO ok!
11550 Sep 22 23:21:35.998 INFO reconfig required, no property group
11551 Sep 22 23:21:35.998 INFO creating config property group
11552 Sep 22 23:21:35.998 INFO reconfiguring oxide/crucible/downstairs:downstairs-f7100c32-c286-432a-9d07-f9e1b092608d
11553 Sep 22 23:21:35.998 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpykMspN/regions/f7100c32-c286-432a-9d07-f9e1b092608d
11554 Sep 22 23:21:35.998 INFO creating missing snapshot instance snapshot-5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2
11555 Sep 22 23:21:35.998 INFO ensure port SCF_TYPE_COUNT 1000
11556 Sep 22 23:21:35.998 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11557 Sep 22 23:21:35.998 INFO ok, have oxide/crucible/downstairs:snapshot-5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2
11558 Sep 22 23:21:35.998 INFO commit
11559 Sep 22 23:21:35.998 INFO reconfig required, no property group
11560 Sep 22 23:21:35.998 INFO reconfiguring oxide/crucible/downstairs:snapshot-5a285992-c4f9-4c6c-8322-87d73bb5c18f-789d4889-da87-4e0a-b406-52cd4a361df2
11561 Sep 22 23:21:35.998 INFO ok!
11562 Sep 22 23:21:35.998 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp1EK8D2/regions/5a285992-c4f9-4c6c-8322-87d73bb5c18f/.zfs/snapshot/789d4889-da87-4e0a-b406-52cd4a361df2
11563 Sep 22 23:21:35.998 INFO ensure port SCF_TYPE_COUNT 1001
11564 Sep 22 23:21:35.998 INFO ensure mode SCF_TYPE_ASTRING ro
11565 Sep 22 23:21:35.998 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11566 Sep 22 23:21:35.998 INFO commit
11567 Sep 22 23:21:35.998 INFO ok!
11568 test test::test_smf_datafile_race_running_snapshots ... ok
11569 test test::test_smf_running_snapshot ... ok
11570 test tests::test_crucible_agent_openapi ... ok
11571 
11572 test result: ok. 11 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
11573 
11574 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_agent_client-6f3900e8033b57ec --nocapture`
11575 
11576 running 0 tests
11577 
11578 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11579 
11580 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_client_types-5ba9f9d411803900 --nocapture`
11581 
11582 running 0 tests
11583 
11584 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11585 
11586 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_common-9a32809bdbdf85c4 --nocapture`
11587 
11588 running 2 tests
11589 test region::test::test_basic_region ... ok
11590 test region::test::test_region_validate_io ... ok
11591 
11592 test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11593 
11594 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_control_client-d0a58354872d46d9 --nocapture`
11595 
11596 running 0 tests
11597 
11598 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11599 
11600 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_downstairs-dce67baac661a5f4 --nocapture`
11601 
11602 running 156 tests
11603 test dump::test::color_compare ... ok
11604 test dump::test::color_compare_one ... ok
11605 test dump::test::color_compare_red0 ... ok
11606 test dump::test::color_compare_red01 ... ok
11607 test dump::test::color_compare_red01_2 ... ok
11608 test dump::test::color_compare_red02_2 ... ok
11609 test dump::test::color_compare_red02 ... ok
11610 test dump::test::color_compare_red1 ... ok
11611 test dump::test::color_compare_red12 ... ok
11612 test dump::test::color_compare_red12_2 ... ok
11613 test dump::test::color_compare_red2 ... ok
11614 test dump::test::color_compare_two ... ok
11615 test dump::test::color_compare_two_red0 ... ok
11616 test dump::test::color_compare_two_red1 ... ok
11617 test region::test::copy_path_basic ... ok
11618 thread 'region::test::bad_import_region' panicked at 'called `Result::unwrap()` on an `Err` value: Error open "/tmp/12345678-1111-2222-3333-123456789999/notadir/region.json": file not found opening region config "/tmp/12345678-1111-2222-3333-123456789999/notadir/region.json"', downstairs/src/region.rs:3466:10
11619 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
11620 {{""msg"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible":"current number of open files limit 65536 is already the maximum","level":,"v":0,"name":"crucible","level":3030{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.073836316Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time":"2023-09-22T23:21:36.074315297Z","hostname":","pid":4766ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11621 }
11622 {{"msg":""msg":"Created new region file \"Created new region file /tmp/.tmpHePGDE/region.json\"\""/tmp/.tmpJVZno2/region.json,"\"v"":0,","v"name"::"0crucible",","name"level"::"30crucible","level":30,"time":"2023-09-22T23:21:36.074644584Z",,""time":hostname":""2023-09-22T23:21:36.074647494Z"ip-10-150-1-55.us-west-2.compute.internal",,""hostname"pid"::"4766}
11623 ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11624 ,"time":"2023-09-22T23:21:36.074216787Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116252023-09-22T23:21:36.075ZINFOcrucible: Created new region file "/tmp/.tmpzvev7V/region.json"
11626 ,"time":"2023-09-22T23:21:36.073883476Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116272023-09-22T23:21:36.075ZINFOcrucible: Created new region file "/tmp/.tmpX75Ahj/region.json"
116282023-09-22T23:21:36.078ZINFOcrucible: current number of open files limit 65536 is already the maximum
11629 {"msg":"Created new region file \"/tmp/.tmpoRNhEc/region.json\"","v":0,"name"test region::test::bad_import_region - should panic ... :"ok
11630 crucible","level":30,"time":"2023-09-22T23:21:36.078740094Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11631 ,"time":"2023-09-22T23:21:36.073991861Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116322023-09-22T23:21:36.078ZINFOcrucible: Created new region file "/tmp/.tmpFHgHPW/region.json"
11633 ,"time":"2023-09-22T23:21:36.074048022Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116342023-09-22T23:21:36.079ZINFOcrucible: Created new region file "/tmp/.tmpho5Xja/region.json"
11635 ,"time":"2023-09-22T23:21:36.074033219Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116362023-09-22T23:21:36.080ZINFOcrucible: Created new region file "/tmp/.tmpJGYNMa/region.json"
116372023-09-22T23:21:36.081ZINFOcrucible: current number of open files limit 65536 is already the maximum
116382023-09-22T23:21:36.081ZINFOcrucible: Created new region file "/tmp/.tmp05J1Ma/region.json"
116392023-09-22T23:21:36.091ZINFOcrucible: current number of open files limit 65536 is already the maximum
116402023-09-22T23:21:36.091ZINFOcrucible: Database read version 1
116412023-09-22T23:21:36.091ZINFOcrucible: Database write version 1
11642 test region::test::duplicate_context_insert ... ok
11643 test region::test::encryption_context ... ok
11644 test region::test::extent_dir_max ... ok
11645 test region::test::extent_dir_basic ... ok
11646 test region::test::extent_dir_min ... ok
11647 thread 'region::test::extent_io_bad_block' panicked at 'called `Result::unwrap()` on an `Err` value: OffsetInvalid', downstairs/src/region.rs:3506:53
11648 thread 'region::test::extent_io_invalid_block_buf' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3516:52
11649 test region::test::extent_io_bad_block - should panic ... ok
11650 test region::test::extent_io_invalid_block_buf - should panic ... ok
11651 thread 'region::test::extent_io_invalid_large' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3526:51
11652 thread 'region::test::extent_io_non_aligned_large' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3486:51
11653 test region::test::extent_io_invalid_large - should panic ... ok
11654 test region::test::extent_io_non_aligned_large - should panic ... ok
11655 thread 'region::test::extent_io_non_aligned_small' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3496:51
11656 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","test region::test::copy_extent_dir ... level"ok
11657 :30,"time":"2023-09-22T23:21:36.099286301Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116582023-09-22T23:21:36.099ZINFOcrucible: Created new region file "/tmp/.tmpiuTKWT/region.json"
11659 test region::test::extent_name_basic ... ok
11660 test region::test::extent_name_basic_ext ... ok
11661 test region::test::extent_io_non_aligned_small - should panic ... ok
11662 test region::test::extent_name_basic_ext_shm ... ok
11663 test region::test::extent_name_basic_ext_wal ... ok
11664 test region::test::extent_name_basic_three ... ok
11665 test region::test::extent_name_basic_two ... ok
11666 test region::test::extent_name_max ... ok
11667 test region::test::extent_io_valid ... ok
11668 test region::test::extent_name_min ... ok
11669 test region::test::extent_path_max ... ok
11670 test region::test::close_extent ... ok
11671 test region::test::extent_path_mid_lo ... ok
11672 EXT BLOCKS GEN0 FL0 D0
11673 0 000-009test region::test::extent_path_mid_hi ... ok
11674  0  0 test region::test::extent_path_min ... ok
11675 F
11676 1 010-019  0  0 F
11677 Max gen: 0, Max flush: 0
11678 test region::test::extent_path_three ... ok
11679 test region::test::copy_extent_dir_twice ... ok
116802023-09-22T23:21:36.103ZINFOcrucible: current number of open files limit 65536 is already the maximum
116812023-09-22T23:21:36.103ZINFOcrucible: Created new region file "/tmp/.tmpi6lJFh/region.json"
116822023-09-22T23:21:36.103ZINFOcrucible: current number of open files limit 65536 is already the maximum
116832023-09-22T23:21:36.103ZINFOcrucible: Created new region file "/tmp/.tmp77DdeC/region.json"
116842023-09-22T23:21:36.103ZINFOcrucible: current number of open files limit 65536 is already the maximum
116852023-09-22T23:21:36.103ZINFOcrucible: Created new region file "/tmp/.tmpN2tu9r/region.json"
116862023-09-22T23:21:36.103ZINFOcrucible: current number of open files limit 65536 is already the maximum
11687 {{"msg":"Opened existing region file "\"msg":"/tmp/.tmpr8RcYx/region.json\"","v":0current number of open files limit 65536 is already the maximum,""name":","crucible"v",:"level0":,"30name":"crucible","level":30,"time":"2023-09-22T23:21:36.103819715Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11688 ,"time":"2023-09-22T23:21:36.103824073Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116892023-09-22T23:21:36.103ZINFOcrucible: Database read version 1
116902023-09-22T23:21:36.103ZINFOcrucible: Database write version 1
11691 test region::test::new_region ... ok
11692 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30test region::test::dump_a_region ... ,ok
11693 "time":"2023-09-22T23:21:36.104357351Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
116942023-09-22T23:21:36.104ZINFOcrucible: Opened existing region file "/tmp/.tmp6hm9W2/region.json"
11695 test region::test::region_bad_database_read_version_high ... ok
11696 test region::test::new_existing_region ... ok
11697 test region::test::region_bad_database_read_version_low ... ok
116982023-09-22T23:21:36.105ZINFOcrucible: current number of open files limit 65536 is already the maximum
11699 {"msg":"Opened existing region file \"/tmp/.tmpvxVNj7/region.json\"","v":0,"name":"crucible","level":30{"msg":","time"current number of open files limit 65536 is already the maximum":","2023-09-22T23:21:36.105162454Z"v":,"0hostname":","name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",",pid"":level4766":}30
11700 {{"msg":""Database read version 1"msg",:""v":0,"name":current number of open files limit 65536 is already the maximum""crucible",","v"level":,"0:,time""name:"":30"2023-09-22T23:21:36.105192994Z"crucible",,""level"hostname:":"30ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11701 ,"time":"2023-09-22T23:21:36.105218709Z{","hostname"":,msg"""time:"":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:36.105222246ZCreated new region file ""\",,"/tmp/.tmp74dTT1/region.json"\"hostname""pid,:""v""::ip-10-150-1-55.us-west-2.compute.internal0"4766,,""}pidname""::4766"
11702 crucible}"
11703 ,"level":30{"msg":"Opened existing region file \"/tmp/.tmpppQckI/region.json\"","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:21:36.105265968Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11704 ,"time":"2023-09-22T23:21:36.105278105Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
117052023-09-22T23:21:36.105ZINFOcrucible: Database read version 1
11706 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.105450646Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4766}"
11707 msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.105501117Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11708 {"msg":"Created new region file \"/tmp/.tmptGTFWj/region.json\"","v":0,"name":"crucible","level":30,"time":"test region::test::region_bad_database_write_version_high ... 2023-09-22T23:21:36.105551894Zok
11709 ","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
117102023-09-22T23:21:36.105ZINFOcrucible: Created new region file "/tmp/.tmpi2WW6m/region.json"
11711 test region::test::region_bad_database_write_version_low ... ok
117122023-09-22T23:21:36.105ZINFOcrucible: current number of open files limit 65536 is already the maximum
117132023-09-22T23:21:36.106ZINFOcrucible: Database read version 1
117142023-09-22T23:21:36.106ZINFOcrucible: Database write version 1
117152023-09-22T23:21:36.106ZINFOcrucible: current number of open files limit 65536 is already the maximum
117162023-09-22T23:21:36.106ZINFOcrucible: Created new region file "/tmp/.tmpNnqPXL/region.json"
117172023-09-22T23:21:36.106ZINFOcrucible: current number of open files limit 65536 is already the maximum
117182023-09-22T23:21:36.106ZINFOcrucible: Created new region file "/tmp/.tmpfQyUMp/region.json"
117192023-09-22T23:21:36.108ZINFOcrucible: current number of open files limit 65536 is already the maximum
117202023-09-22T23:21:36.108ZINFOcrucible: Database read version 1
117212023-09-22T23:21:36.108ZINFOcrucible: Database write version 1
11722 test region::test::multiple_context ... ok
117232023-09-22T23:21:36.113ZINFOcrucible: current number of open files limit 65536 is already the maximum
117242023-09-22T23:21:36.113ZINFOcrucible: Created new region file "/tmp/.tmpF6IEvH/region.json"
117252023-09-22T23:21:36.114ZINFOcrucible: current number of open files limit 65536 is already the maximum
117262023-09-22T23:21:36.114ZINFOcrucible: Database read version 1
117272023-09-22T23:21:36.114ZINFOcrucible: Database write version 1
117282023-09-22T23:21:36.115ZINFOcrucible: current number of open files limit 65536 is already the maximum
11729 {{""msg":"msg":"Opened existing region file \"/tmp/.tmp74dTT1/region.json\""current number of open files limit 65536 is already the maximum,""v":0,","name"v"::"0crucible",,""name"level"::"30crucible","level":30,"time":"2023-09-22T23:21:36.115244506Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11730 {,""msgtime"":":"Database read version 1","v":02023-09-22T23:21:36.115248095Z","name":","crucible","hostname"level"::30"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11731 ,"time":"2023-09-22T23:21:36.115278141Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11732 "msg":{""Database read version 1"msg":,"v":"0,"Database write version 1"name",:""v":crucible"0,,""level"name"::"30crucible","level":30,,""time"time":":"2023-09-22T23:21:36.115312948Z"2023-09-22T23:21:36.115315234Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"",pid"":pid"4766:4766}
11733 }
117342023-09-22T23:21:36.115ZINFOcrucible: Database write version 1
11735 cp "/tmp/.tmpfQyUMp/00/000/000" to "/tmp/.tmpfQyUMp/00/000/001.copy/001"
11736 Recreate "/tmp/.tmpfQyUMp/00/000/001.db-shm"
11737 Recreate "/tmp/.tmpfQyUMp/00/000/001.db-wal"
117382023-09-22T23:21:36.116ZINFOcrucible: Extent 1 found replacement dir, finishing replacement
117392023-09-22T23:21:36.116ZINFOcrucible: Copy files from "/tmp/.tmpfQyUMp/00/000/001.replace" in "/tmp/.tmpfQyUMp/00/000"
117402023-09-22T23:21:36.116ZINFOcrucible: Remove old file "/tmp/.tmpfQyUMp/00/000/001.db-shm" as there is no replacement
117412023-09-22T23:21:36.116ZINFOcrucible: Remove old file "/tmp/.tmpfQyUMp/00/000/001.db-wal" as there is no replacement
117422023-09-22T23:21:36.116ZINFOcrucible: Move directory "/tmp/.tmpfQyUMp/00/000/001.replace" to "/tmp/.tmpfQyUMp/00/000/001.completed"
117432023-09-22T23:21:36.118ZINFOcrucible: Extent 1 found replacement dir, finishing replacement
117442023-09-22T23:21:36.118ZINFOcrucible: Copy files from "/tmp/.tmpNnqPXL/00/000/001.replace" in "/tmp/.tmpNnqPXL/00/000"
117452023-09-22T23:21:36.119ZINFOcrucible: Move directory "/tmp/.tmpNnqPXL/00/000/001.replace" to "/tmp/.tmpNnqPXL/00/000/001.completed"
11746 test region::test::reopen_extent_cleanup_replay_short ... ok
11747 Extent 2
11748 GEN 0 0
11749 FLUSH_ID 0 0
11750 DIRTY
11751 
11752 BLOCK D0 D1 C0 C1 DIFF
117532023-09-22T23:21:36.120ZINFOcrucible: current number of open files limit 65536 is already the maximum
117542023-09-22T23:21:36.120ZINFOcrucible: Database read version 1
117552023-09-22T23:21:36.121ZINFOcrucible: Database write version 1
117562023-09-22T23:21:36.121ZINFOcrucible: current number of open files limit 65536 is already the maximum
117572023-09-22T23:21:36.121ZINFOcrucible: Created new region file "/tmp/.tmpntsZot/region.json"
11758 test region::test::reopen_extent_cleanup_one ... ok
117592023-09-22T23:21:36.122ZINFOcrucible: current number of open files limit 65536 is already the maximum
117602023-09-22T23:21:36.122ZINFOcrucible: Created new region file "/tmp/.tmprdPlCZ/region.json"
11761 EXT BLOCKS GEN0 GEN1 FL0 FL1 D0 D1
11762 0 000-009  0  0  0  0 F F
11763 1 010-019  0  0  0  0 F F
11764 Max gen: 0, Max flush: 0
11765 test region::test::reopen_extent_cleanup_replay ... ok
117662023-09-22T23:21:36.124ZINFOcrucible: current number of open files limit 65536 is already the maximum
117672023-09-22T23:21:36.124ZINFOcrucible: Created new region file "/tmp/.tmpNuvjXd/region.json"
11768 test region::test::region_create_drop_open ... ok
117692023-09-22T23:21:36.125ZINFOcrucible: current number of open files limit 65536 is already the maximum
117702023-09-22T23:21:36.125ZINFOcrucible: Created new region file "/tmp/.tmp8sn7MW/region.json"
11771 test region::test::dump_two_region ... ok
117722023-09-22T23:21:36.127ZINFOcrucible: current number of open files limit 65536 is already the maximum
117732023-09-22T23:21:36.127ZINFOcrucible: Created new region file "/tmp/.tmpomLMVu/region.json"
11774 test region::test::reopen_extent_cleanup_two ... ok
11775 {"msg":"Failed write hash validation","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:21:36.128794397Z"{,"hostname":"ip-10-150-1-55.us-west-2.compute.internal""msg",:""pid":4766current number of open files limit 65536 is already the maximum"},"
11776 v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.128853368Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
117772023-09-22T23:21:36.128ZINFOcrucible: Created new region file "/tmp/.tmpgvVgcF/region.json"
117782023-09-22T23:21:36.129ZINFOcrucible: current number of open files limit 65536 is already the maximum
117792023-09-22T23:21:36.129ZINFOcrucible: Database read version 1
117802023-09-22T23:21:36.129ZINFOcrucible: Database write version 1
11781 test region::test::test_bad_hash_bad ... ok
117822023-09-22T23:21:36.130ZINFOcrucible: current number of open files limit 65536 is already the maximum
117832023-09-22T23:21:36.130ZINFOcrucible: Created new region file "/tmp/.tmpQNdK4B/region.json"
117842023-09-22T23:21:36.131ZINFOcrucible: current number of open files limit 65536 is already the maximum
117852023-09-22T23:21:36.131ZINFOcrucible: Database read version 1
117862023-09-22T23:21:36.131ZINFOcrucible: Database write version 1
11787 test region::test::test_blank_block_read_ok ... ok
117882023-09-22T23:21:36.133ZINFOcrucible: current number of open files limit 65536 is already the maximum
11789 {"msg":"Created new region file \"/tmp/.tmpy4Quhy/region.json\"","v":0,"name":"crucible","level":30,020 "time": A" A A A2023-09-22T23:21:36.133741631Z" ,
11790 test region::test::reopen_all_extents ... ok
11791 "hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
117922023-09-22T23:21:36.133ZINFOcrucible: current number of open files limit 65536 is already the maximum
117932023-09-22T23:21:36.134ZINFOcrucible: Database read version 1
117942023-09-22T23:21:36.134ZINFOcrucible: Database write version 1
117952023-09-22T23:21:36.134ZINFOcrucible: current number of open files limit 65536 is already the maximum
117962023-09-22T23:21:36.134ZINFOcrucible: Created new region file "/tmp/.tmpXjT7Kw/region.json"
11797 test region::test::reopen_extent_no_replay_readonly ... ok
117982023-09-22T23:21:36.137ZINFOcrucible: current number of open files limit 65536 is already the maximum
117992023-09-22T23:21:36.137ZINFOcrucible: Created new region file "/tmp/.tmpxFRMrp/region.json"
11800 test region::test::test_big_write ... ok
118012023-09-22T23:21:36.139ZINFOcrucible: current number of open files limit 65536 is already the maximum
118022023-09-22T23:21:36.139ZINFOcrucible: Database read version 1
118032023-09-22T23:21:36.139ZINFOcrucible: Database write version 1
11804 test region::test::test_extent_write_flush_close ... ok
118052023-09-22T23:21:36.139ZINFOcrucible: current number of open files limit 65536 is already the maximum
118062023-09-22T23:21:36.139ZINFOcrucible: Created new region file "/tmp/.tmpMj5GG3/region.json"
118072023-09-22T23:21:36.140ZINFOcrucible: current number of open files limit 65536 is already the maximum
118082023-09-22T23:21:36.140ZINFOcrucible: Created new region file "/tmp/.tmpuWtzyh/region.json"
11809 test region::test::test_extent_close_reopen_flush_close ... ok
118102023-09-22T23:21:36.142ZINFOcrucible: current number of open files limit 65536 is already the maximum
118112023-09-22T23:21:36.142ZINFOcrucible: Created new region file "/tmp/.tmpH48PpE/region.json"
11812 test region::test::test_flush_extent_limit_base ... ok
11813 test region::test::test_flush_extent_limit_too_large ... ok
11814 021 A A A A
118152023-09-22T23:21:36.143ZINFOcrucible: current number of open files limit 65536 is already the maximum
118162023-09-22T23:21:36.143ZINFOcrucible: Created new region file "/tmp/.tmpqrtt1Y/region.json"
118172023-09-22T23:21:36.144ZINFOcrucible: current number of open files limit 65536 is already the maximum
118182023-09-22T23:21:36.144ZINFOcrucible: Database read version 1
11819 {"msg":"Database write version 1","v":0,"name":"{crucible","level":30"msg":"current number of open files limit 65536 is already the maximum","v,"":time":"0,"2023-09-22T23:21:36.144305536Z"name":,""crucible"hostname":,""level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4766}
11820 ,"time":"2023-09-22T23:21:36.144328701Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
118212023-09-22T23:21:36.144ZINFOcrucible: Created new region file "/tmp/.tmpSuumQF/region.json"
11822 test region::test::test_flush_extent_limit_end ... ok
118232023-09-22T23:21:36.147ZINFOcrucible: current number of open files limit 65536 is already the maximum
118242023-09-22T23:21:36.147ZINFOcrucible: Created new region file "/tmp/.tmp6x9BQd/region.json"
118252023-09-22T23:21:36.148ZINFOcrucible: current number of open files limit 65536 is already the maximum
11826 test region::test::test_fully_rehash_and_clean_does_not_mark_blocks_as_written ... ok
118272023-09-22T23:21:36.148ZINFOcrucible: Database read version 1
118282023-09-22T23:21:36.149ZINFOcrucible: Database write version 1
118292023-09-22T23:21:36.149ZINFOcrucible: current number of open files limit 65536 is already the maximum
118302023-09-22T23:21:36.149ZINFOcrucible: Created new region file "/tmp/.tmp3ADSVm/region.json"
11831 test region::test::test_ok_hash_ok ... ok
11832 test region::test::test_fully_rehash_marks_blocks_unwritten_if_data_never_hit_disk ... ok
118332023-09-22T23:21:36.150ZINFOcrucible: current number of open files limit 65536 is already the maximum
118342023-09-22T23:21:36.150ZINFOcrucible: Created new region file "/tmp/.tmp3gtgmK/region.json"
118352023-09-22T23:21:36.150ZINFOcrucible: current number of open files limit 65536 is already the maximum
118362023-09-22T23:21:36.150ZINFOcrucible: Created new region file "/tmp/.tmpyPH46t/region.json"
11837 022 A A A A
118382023-09-22T23:21:36.153ZINFOcrucible: current number of open files limit 65536 is already the maximum
118392023-09-22T23:21:36.153ZINFOcrucible: Database read version 1
118402023-09-22T23:21:36.153ZINFOcrucible: Database write version 1
118412023-09-22T23:21:36.157ZINFOcrucible: current number of open files limit 65536 is already the maximum
118422023-09-22T23:21:36.157ZINFOcrucible: Database read version 1
118432023-09-22T23:21:36.157ZINFOcrucible: Database write version 1
11844 Send flush to extent limit 0
11845 test region::test::test_read_multiple_disjoint_large_contiguous ... ok
11846 extent 0 should not be dirty now
11847 verify 1 still dirty
11848 verify 2 still dirty
11849 verify 3 still dirty
11850 verify 4 still dirty
11851 verify 5 still dirty
11852 verify 6 still dirty
11853 verify 7 still dirty
11854 {"msg":"current number of open files limit 65536 is already the maximum",verify 8 still dirty
11855 "v":0,"name":"crucible","level":30verify 9 still dirty
11856 ,"time":"2023-09-22T23:21:36.160191505Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11857 Send flush to extent limit 1
11858 023 A A A A
118592023-09-22T23:21:36.160ZINFOcrucible: Created new region file "/tmp/.tmpfOWcq5/region.json"
11860 test region::test::test_read_multiple_disjoint_none_contiguous ... ok
118612023-09-22T23:21:36.160ZINFOcrucible: current number of open files limit 65536 is already the maximum
118622023-09-22T23:21:36.160ZINFOcrucible: Database read version 1
118632023-09-22T23:21:36.160ZINFOcrucible: Database write version 1
11864 extent 1 should not be dirty now
11865 verify 2 still dirty
118662023-09-22T23:21:36.161ZINFOcrucible: current number of open files limit 65536 is already the maximum
118672023-09-22T23:21:36.161ZINFOcrucible: Created new region file "/tmp/.tmpnAVzN7/region.json"
11868 verify 3 still dirty
11869 verify 4 still dirty
11870 verify 5 still dirty
11871 verify 6 still dirty
11872 verify 7 still dirty
11873 verify 8 still dirty
11874 verify 9 still dirty
11875 Send flush to extent limit 2
11876 extent 2 should not be dirty now
11877 test region::test::test_read_single_large_contiguous ... ok
11878 verify 3 still dirty
11879 verify 4 still dirty
11880 verify 5 still dirty
11881 verify 6 still dirty
11882 verify 7 still dirty
11883 verify 8 still dirty
11884 verify 9 still dirty
11885 Send flush to extent limit 3
11886 extent 3 should not be dirty now
11887 verify 4 still dirty
118882023-09-22T23:21:36.163ZINFOcrucible: current number of open files limit 65536 is already the maximum
118892023-09-22T23:21:36.163ZINFOcrucible: Created new region file "/tmp/.tmpi1O2O9/region.json"
11890 verify 5 still dirty
11891 verify 6 still dirty
11892 verify 7 still dirty
11893 verify 8 still dirty
11894 verify 9 still dirty
11895 Send flush to extent limit 4
11896 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.164788192Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"extent 4 should not be dirty now
11897 ,"pid":4766}
11898 verify 5 still dirty
11899 verify 6 still dirty
11900 verify 7 still dirty
11901 verify 8 still dirty
11902 verify 9 still dirty
11903 Send flush to extent limit 5
11904 test region::test::test_read_single_large_contiguous_span_extents ... ok
11905 {extent 5 should not be dirty now
11906 "msg":"Database read version 1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.165287477Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
119072023-09-22T23:21:36.165ZINFOcrucible: Database write version 1
119082023-09-22T23:21:36.165ZINFOcrucible: current number of open files limit 65536 is already the maximum
119092023-09-22T23:21:36.165ZINFOcrucible: Created new region file "/tmp/.tmpPcSvx5/region.json"
11910 verify 6 still dirty
11911 verify 7 still dirty
11912 verify 8 still dirty
11913 verify 9 still dirty
11914 Send flush to extent limit 6
11915 extent 6 should not be dirty now
11916 verify 7 still dirty
11917 verify 8 still dirty
11918 verify 9 still dirty
11919 Send flush to extent limit 7
11920 test region::test::test_region_open_removes_partial_writes ... ok
11921 extent 7 should not be dirty now
11922 verify 8 still dirty
11923 verify 9 still dirty
11924 Send flush to extent limit 8
11925 extent 8 should not be dirty now
119262023-09-22T23:21:36.167ZINFOcrucible: current number of open files limit 65536 is already the maximum
119272023-09-22T23:21:36.167ZINFOcrucible: Created new region file "/tmp/.tmpvA8mjM/region.json"
11928 verify 9 still dirty
11929 Send flush to extent limit 9
11930 extent 9 should not be dirty now
11931 024 A A A A
119322023-09-22T23:21:36.170ZINFOcrucible: current number of open files limit 65536 is already the maximum
119332023-09-22T23:21:36.170ZINFOcrucible: Database read version 1
119342023-09-22T23:21:36.170ZINFOcrucible: Database write version 1
11935 test region::test::test_flush_extent_limit_walk_it_off ... ok
119362023-09-22T23:21:36.172ZINFOcrucible: current number of open files limit 65536 is already the maximum
119372023-09-22T23:21:36.173ZINFOcrucible: Database read version 1
119382023-09-22T23:21:36.173ZINFOcrucible: Database write version 1
119392023-09-22T23:21:36.173ZINFOcrucible: current number of open files limit 65536 is already the maximum
119402023-09-22T23:21:36.173ZINFOcrucible: Created new region file "/tmp/.tmperRgma/region.json"
11941 test region::test::test_write_multiple_disjoint_large_contiguous ... ok
11942 025 A A A A
119432023-09-22T23:21:36.175ZINFOcrucible: current number of open files limit 65536 is already the maximum
119442023-09-22T23:21:36.176ZINFOcrucible: Database read version 1
11945 {"msg":"Database write version 1","v":0,"name":"crucible","level":30test region::test::test_write_multiple_disjoint_none_contiguous ... ok
11946 ,"time":"2023-09-22T23:21:36.176034295Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
119472023-09-22T23:21:36.176ZINFOcrucible: current number of open files limit 65536 is already the maximum
119482023-09-22T23:21:36.176ZINFOcrucible: Created new region file "/tmp/.tmpzWh5Jq/region.json"
119492023-09-22T23:21:36.176ZINFOcrucible: current number of open files limit 65536 is already the maximum
119502023-09-22T23:21:36.176ZINFOcrucible: Created new region file "/tmp/.tmp8g1tlB/region.json"
11951 test region::test::test_write_single_large_contiguous ... ok
119522023-09-22T23:21:36.178ZINFOcrucible: current number of open files limit 65536 is already the maximum
119532023-09-22T23:21:36.178ZINFOcrucible: Created new region file "/tmp/.tmpiH1UWN/region.json"
119542023-09-22T23:21:36.179ZINFOcrucible: current number of open files limit 65536 is already the maximum
119552023-09-22T23:21:36.179ZINFOcrucible: Database read version 1
119562023-09-22T23:21:36.179ZINFOcrucible: Database write version 1
11957 Total size: 15360
11958 test region::test::test_write_single_large_contiguous_span_extents ... ok
119592023-09-22T23:21:36.182ZINFOcrucible: current number of open files limit 65536 is already the maximum
119602023-09-22T23:21:36.182ZINFOcrucible: Created new region file "/tmp/.tmplN2WTP/region.json"
11961 026 A A A A
119622023-09-22T23:21:36.182ZINFOcrucible: current number of open files limit 65536 is already the maximum
119632023-09-22T23:21:36.182ZINFOcrucible: Database read version 1
119642023-09-22T23:21:36.183ZINFOcrucible: Database write version 1
11965 test region::test::test_write_unwritten_big_write ... ok
119662023-09-22T23:21:36.184ZINFOcrucible: current number of open files limit 65536 is already the maximum
11967 {"msg":"Created new region file \"/tmp/.tmpqIRCQc/region.json\"","v":0,"name"buffer size:2048
11968 :"crucible","level":30,"time":"2023-09-22T23:21:36.184846764Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
11969 Read eid: 0, 0 offset: Block { value: 0, shift: 9 }
11970 Read eid: 0, 1 offset: Block { value: 1, shift: 9 }
11971 Read eid: 0, 2 offset: Block { value: 2, shift: 9 }
11972 Read eid: 0, 3 offset: Block { value: 3, shift: 9 }
119732023-09-22T23:21:36.185ZINFOcrucible: current number of open files limit 65536 is already the maximum
119742023-09-22T23:21:36.185ZINFOcrucible: Database read version 1
119752023-09-22T23:21:36.185ZINFOcrucible: Database write version 1
11976 Read a region, append
11977 Read a region, append
11978 Read a region, append
11979 Read a region, append
11980 test region::test::test_write_unwritten_big_write_partial_0 ... ok
119812023-09-22T23:21:36.187ZINFOcrucible: current number of open files limit 65536 is already the maximum
119822023-09-22T23:21:36.187ZINFOcrucible: Created new region file "/tmp/.tmpAqU3tu/region.json"
11983 Total size: 20480
11984 test region::test::test_write_unwritten_big_write_partial_1 ... ok
11985 test region::test::test_write_unwritten_big_write_partial_final ... ok
119862023-09-22T23:21:36.189ZINFOcrucible: current number of open files limit 65536 is already the maximum
119872023-09-22T23:21:36.189ZINFOcrucible: Created new region file "/tmp/.tmp5xP0uT/region.json"
119882023-09-22T23:21:36.189ZINFOcrucible: current number of open files limit 65536 is already the maximum
119892023-09-22T23:21:36.189ZINFOcrucible: Created new region file "/tmp/.tmpPV6kWM/region.json"
11990 027 A A A A
119912023-09-22T23:21:36.190ZINFOcrucible: current number of open files limit 65536 is already the maximum
119922023-09-22T23:21:36.191ZINFOcrucible: Database read version 1
119932023-09-22T23:21:36.191ZINFOcrucible: Database write version 1
119942023-09-22T23:21:36.193ZINFOcrucible: current number of open files limit 65536 is already the maximum
119952023-09-22T23:21:36.194ZINFOcrucible: Database read version 1
119962023-09-22T23:21:36.194ZINFOcrucible: Database write version 1
11997 test region::test::test_write_unwritten_big_write_partial_sparse ... ok
119982023-09-22T23:21:36.197ZINFOcrucible: current number of open files limit 65536 is already the maximum
119992023-09-22T23:21:36.197ZINFOcrucible: Created new region file "/tmp/.tmpauX5Ae/region.json"
12000 test region::test::test_write_unwritten_multiple_disjoint_large_contiguous ... ok
12001 test region::test::test_write_unwritten_multiple_disjoint_none_contiguous ... ok
12002 test region::test::test_write_unwritten_single_large_contiguous ... {ok
12003 "msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.198391884Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12004 028 A A A A
120052023-09-22T23:21:36.198ZINFOcrucible: Created new region file "/tmp/.tmpuaYM4G/region.json"
12006 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible",","level"time"::"302023-09-22T23:21:36.198582864Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12007 test region::test::validate_repair_files_also_good ... ok,
12008 "time":"2023-09-22T23:21:36.198608693Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
120092023-09-22T23:21:36.198ZINFOcrucible: Created new region file "/tmp/.tmppqa7nG/region.json"
120102023-09-22T23:21:36.198ZINFOcrucible: Database read version 1
120112023-09-22T23:21:36.198ZINFOcrucible: Database write version 1
12012 test region::test::validate_repair_files_duplicate ... ok
12013 test region::test::validate_repair_files_duplicate_pair ... ok
12014 test region::test::validate_repair_files_empty ... ok
12015 test region::test::validate_repair_files_good ... ok
12016 test region::test::validate_repair_files_not_good_enough ... ok
12017 test region::test::validate_repair_files_offbyon ... ok
12018 test region::test::validate_repair_files_quad_duplicate ... ok
12019 test region::test::test_write_unwritten_single_large_contiguous_span_extents ... ok
12020 test region::test::validate_repair_files_too_good ... ok
120212023-09-22T23:21:36.201ZINFOcrucible: current number of open files limit 65536 is already the maximum
120222023-09-22T23:21:36.201ZINFOcrucible: Created new region file "/tmp/.tmpSZM0Ba/region.json"
120232023-09-22T23:21:36.202ZINFOcrucible: current number of open files limit 65536 is already the maximum
120242023-09-22T23:21:36.202ZINFOcrucible: Created new region file "/tmp/.tmpgmUT7T/region.json"
12025 test region::test::test_write_unwritten_when_empty ... ok
120262023-09-22T23:21:36.202ZINFOcrucible: current number of open files limit 65536 is already the maximum
120272023-09-22T23:21:36.202ZINFOcrucible: Database read version 1
120282023-09-22T23:21:36.202ZINFOcrucible: Database write version 1
120292023-09-22T23:21:36.202ZINFOcrucible: current number of open files limit 65536 is already the maximum
120302023-09-22T23:21:36.203ZINFOcrucible: Created new region file "/tmp/.tmpMUpUBZ/region.json"
12031 test region::test::test_write_unwritten_when_written ... ok
120322023-09-22T23:21:36.204ZINFOcrucible: current number of open files limit 65536 is already the maximum
120332023-09-22T23:21:36.204ZINFOcrucible: Created new region file "/tmp/.tmpjvZIIZ/region.json"
12034 test region::test::test_write_unwritten_when_written_flush ... ok
120352023-09-22T23:21:36.205ZINFOcrucible: current number of open files limit 65536 is already the maximum
120362023-09-22T23:21:36.205ZINFOcrucible: Created new region file "/tmp/.tmpx0FzWJ/region.json"
12037 029 A A A A
12038 files: ["001", "001.db", "001.db-shm", "001.db-wal"]
12039 test repair::test::extent_expected_files_fail ... ok
12040 test repair::test::extent_expected_files_fail_two ... ok
12041 test repair::test::extent_expected_files ... ok
12042 files: ["001", "001.db"]
120432023-09-22T23:21:36.211ZINFOcrucible: current number of open files limit 65536 is already the maximum
120442023-09-22T23:21:36.211ZINFOcrucible: Created new region file "/tmp/.tmpgbUOaV/region.json"
120452023-09-22T23:21:36.211ZINFOcrucible: current number of open files limit 65536 is already the maximum
120462023-09-22T23:21:36.211ZINFOcrucible: Created new region file "/tmp/.tmpGKg4P3/region.json"
12047 test repair::test::extent_expected_files_short ... ok
12048 test region::test::dump_extent ... ok
120492023-09-22T23:21:36.213ZINFOcrucible: current number of open files limit 65536 is already the maximum
120502023-09-22T23:21:36.213ZINFOcrucible: Created new region file "/tmp/.tmpixzTQ1/region.json"
120512023-09-22T23:21:36.214ZINFOcrucible: current number of open files limit 65536 is already the maximum
12052 files: ["001", "001.db"]
120532023-09-22T23:21:36.214ZINFOcrucible: Created new region file "/tmp/.tmpBn4xhF/region.json"
12054 test repair::test::extent_expected_files_short_with_close ... ok
120552023-09-22T23:21:36.218ZWARNcrucible: 1002 job Flush for connection UpstairsConnection { upstairs_id: 46c133d9-b38b-4055-adce-337c31923de1, session_id: 414a6c2c-a324-4ae8-9ffd-3c12d2b3e1c0, gen: 0 } waiting on 1 deps
12056 test test::job_dep_not_satisfied ... ok
12057 test test::jobs_extent_close ... ok
12058 test test::jobs_extent_flush_close ... ok
12059 test test::jobs_extent_live_noop ... ok
12060 test test::jobs_extent_live_reopen ... ok
12061 test test::jobs_extent_live_repair ... ok
12062 test repair::test::test_crucible_repair_openapi ... ok
12063 test test::jobs_independent ... ok
12064 test test::jobs_write_unwritten ... ok
120652023-09-22T23:21:36.227ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: c02a08ce-458e-4f61-907f-97a9103eb0dd, session_id: d3f895e5-eda6-4e78-9662-969601129dc3, gen: 0 } waiting on 2 deps
12066 Import file_size: 51200 Extent size: 5120 Needed extents: 10
12067 Region already large enough for image
12068 Importing "/tmp/.tmpSz1cfO/random_data" to region
120692023-09-22T23:21:36.228ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: c02a08ce-458e-4f61-907f-97a9103eb0dd, session_id: d3f895e5-eda6-4e78-9662-969601129dc3, gen: 0 } waiting on 1 deps
120702023-09-22T23:21:36.228ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: 9cb730fb-14c4-4470-a37f-0a6ddcb7f6c3, session_id: ded622ab-8422-4ba2-9fb6-09cfcaa27eb4, gen: 0 } waiting on 2 deps
12071 Import file_size: 51200 Extent size: 5120 Needed extents: 10
12072 Region already large enough for image
12073 Importing "/tmp/.tmpY1QcCg/random_data" to region
120742023-09-22T23:21:36.229ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: 9cb730fb-14c4-4470-a37f-0a6ddcb7f6c3, session_id: ded622ab-8422-4ba2-9fb6-09cfcaa27eb4, gen: 0 } waiting on 1 deps
12075 test test::out_of_order_arrives_after_1001_completes ... ok
12076 test test::out_of_order_arrives_after_first_do_work ... ok
120772023-09-22T23:21:36.231ZINFOcrucible: current number of open files limit 65536 is already the maximum
120782023-09-22T23:21:36.231ZINFOcrucible: Created new region file "/tmp/.tmpey9JDt/region.json"
12079 Import file_size: 51300 Extent size: 5120 Needed extents: 11
12080 Extending region to fit image
12081 Import file_size: 51100 Extent size: 5120 Needed extents: 10
12082 Region already large enough for image
12083 Importing "/tmp/.tmpBIrDZy/random_data" to region
120842023-09-22T23:21:36.233ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: aef73505-1958-4799-9918-432738d9b385, session_id: 7f0ac549-00a1-42ce-8018-190ab0852381, gen: 0 } waiting on 2 deps
12085 Importing "/tmp/.tmpa7KOZ6/random_data" to region
120862023-09-22T23:21:36.234ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: aef73505-1958-4799-9918-432738d9b385, session_id: 7f0ac549-00a1-42ce-8018-190ab0852381, gen: 0 } waiting on 1 deps
12087 test test::out_of_order_arrives_after_first_push_next_jobs ... ok
120882023-09-22T23:21:36.236ZINFOcrucible: current number of open files limit 65536 is already the maximum
120892023-09-22T23:21:36.236ZINFOcrucible: Created new region file "/tmp/.tmpO3n5HR/region.json"
120902023-09-22T23:21:36.237ZINFOcrucible: current number of open files limit 65536 is already the maximum
120912023-09-22T23:21:36.237ZINFOcrucible: Opened existing region file "/tmp/.tmpey9JDt/region.json"
120922023-09-22T23:21:36.237ZINFOcrucible: Database read version 1
120932023-09-22T23:21:36.237ZINFOcrucible: Database write version 1
120942023-09-22T23:21:36.240ZINFOcrucible: UUID: ec4db1ef-26ef-4452-8c2f-4e84fe9417d0
120952023-09-22T23:21:36.240ZINFOcrucible: Blocks per extent:4 Total Extents: 2
120962023-09-22T23:21:36.240ZINFOcrucible: UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 } is now active (read-write)
120972023-09-22T23:21:36.240ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 } to UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 }
120982023-09-22T23:21:36.240ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 } thread that UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 } is being promoted (read-write)
120992023-09-22T23:21:36.240ZWARNcrucible: Crucible Downstairs promoting UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 } to active, discarding 1 jobs
121002023-09-22T23:21:36.240ZINFOcrucible: UpstairsConnection { upstairs_id: 6425146f-9763-4b8e-9214-3a0aa05ff047, session_id: 91dc2a17-915b-4d94-a7a6-c9c413ea5a19, gen: 10 } is now active (read-write)
121012023-09-22T23:21:36.242ZINFOcrucible: current number of open files limit 65536 is already the maximum
121022023-09-22T23:21:36.242ZINFOcrucible: Opened existing region file "/tmp/.tmpO3n5HR/region.json"
12103 {"msg":"Database read version 1","v":0,"name":"crucible","level":30test test::test_complete_work_can_see_none ... ,ok
12104 "time":"2023-09-22T23:21:36.2425166Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
121052023-09-22T23:21:36.242ZINFOcrucible: Database write version 1
121062023-09-22T23:21:36.243ZINFOcrucible: current number of open files limit 65536 is already the maximum
121072023-09-22T23:21:36.243ZINFOcrucible: UUID: ca00fd14-756b-4b6c-995a-1f0dc95ba49d
12108 {"msg":"Blocks per extent:4 Total Extents: 2","v":0,"name":"crucible","level":30{"msg":"Created new region file ,\""time":"/tmp/.tmpICuwkB/region.json\"",2023-09-22T23:21:36.243381296Z""v",:"0hostname,"":"name":"crucible","ip-10-150-1-55.us-west-2.compute.internal"level",:"30pid":4766}
12109 {"msg":","time":"2023-09-22T23:21:36.243400952Z"UpstairsConnection { upstairs_id: 64733bc8-0e54-426a-b284-8ee7a1092e12, session_id: c328a90a-8f5c-4369-a069-ea688b82e263, gen: 10 } is now active (read-write),"","hostnamev""::"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pid":level"4766:30}
12110 ,"time":"2023-09-22T23:21:36.24342261Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
121112023-09-22T23:21:36.243ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 64733bc8-0e54-426a-b284-8ee7a1092e12, session_id: c328a90a-8f5c-4369-a069-ea688b82e263, gen: 10 } to UpstairsConnection { upstairs_id: 6d8e1874-8a0b-4075-9b57-a413f518a52b, session_id: 5b236919-eca9-476f-9f66-0439a7f4dbf6, gen: 11 }
121122023-09-22T23:21:36.243ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: 64733bc8-0e54-426a-b284-8ee7a1092e12, session_id: c328a90a-8f5c-4369-a069-ea688b82e263, gen: 10 } thread that UpstairsConnection { upstairs_id: 6d8e1874-8a0b-4075-9b57-a413f518a52b, session_id: 5b236919-eca9-476f-9f66-0439a7f4dbf6, gen: 11 } is being promoted (read-write)
121132023-09-22T23:21:36.243ZWARNcrucible: Crucible Downstairs promoting UpstairsConnection { upstairs_id: 6d8e1874-8a0b-4075-9b57-a413f518a52b, session_id: 5b236919-eca9-476f-9f66-0439a7f4dbf6, gen: 11 } to active, discarding 1 jobs
121142023-09-22T23:21:36.243ZINFOcrucible: UpstairsConnection { upstairs_id: 6d8e1874-8a0b-4075-9b57-a413f518a52b, session_id: 5b236919-eca9-476f-9f66-0439a7f4dbf6, gen: 11 } is now active (read-write)
121152023-09-22T23:21:36.243ZWARNcrucible: UpstairsConnection { upstairs_id: 64733bc8-0e54-426a-b284-8ee7a1092e12, session_id: c328a90a-8f5c-4369-a069-ea688b82e263, gen: 10 } cannot grab work lock, 64733bc8-0e54-426a-b284-8ee7a1092e12 is not active!
12116 test test::test_complete_work_cannot_see_none_different_upstairs_id ... ok
121172023-09-22T23:21:36.246ZINFOcrucible: current number of open files limit 65536 is already the maximum
121182023-09-22T23:21:36.246ZINFOcrucible: Created new region file "/tmp/.tmpPtzVoF/region.json"
121192023-09-22T23:21:36.248ZINFOcrucible: current number of open files limit 65536 is already the maximum
121202023-09-22T23:21:36.248ZINFOcrucible: Opened existing region file "/tmp/.tmpICuwkB/region.json"
121212023-09-22T23:21:36.248ZINFOcrucible: Database read version 1
121222023-09-22T23:21:36.248ZINFOcrucible: Database write version 1
121232023-09-22T23:21:36.250ZINFOcrucible: UUID: c4a9db90-cf08-4cd2-a281-2e3153e10dea
121242023-09-22T23:21:36.250ZINFOcrucible: Blocks per extent:4 Total Extents: 2
121252023-09-22T23:21:36.250ZINFOcrucible: UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: cfd97f53-f668-4c56-a83f-a49fa1f1dd7b, gen: 10 } is now active (read-write)
121262023-09-22T23:21:36.250ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: cfd97f53-f668-4c56-a83f-a49fa1f1dd7b, gen: 10 } to UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: 05528500-99fe-4bae-be75-dfc22dbde9d4, gen: 11 }
121272023-09-22T23:21:36.250ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: cfd97f53-f668-4c56-a83f-a49fa1f1dd7b, gen: 10 } thread that UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: 05528500-99fe-4bae-be75-dfc22dbde9d4, gen: 11 } is being promoted (read-write)
121282023-09-22T23:21:36.250ZWARNcrucible: Crucible Downstairs promoting UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: 05528500-99fe-4bae-be75-dfc22dbde9d4, gen: 11 } to active, discarding 1 jobs
121292023-09-22T23:21:36.250ZINFOcrucible: UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: 05528500-99fe-4bae-be75-dfc22dbde9d4, gen: 11 } is now active (read-write)
121302023-09-22T23:21:36.250ZWARNcrucible: UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: cfd97f53-f668-4c56-a83f-a49fa1f1dd7b, gen: 10 } cannot grab lock, does not match UpstairsConnection { upstairs_id: debf9816-5036-4a29-971a-7959707ef7c4, session_id: 05528500-99fe-4bae-be75-dfc22dbde9d4, gen: 11 }!
12131 test test::test_complete_work_cannot_see_none_same_upstairs_id ... ok
12132 {{"msg":""current number of open files limit 65536 is already the maximum"msg":","v":0,"current number of open files limit 65536 is already the maximumname":""crucible",","level"v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.254150803Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""timepid""::"47662023-09-22T23:21:36.254156538Z"}
12133 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4766"msg":"}
12134 Opened existing region file \"/tmp/.tmpPtzVoF/region.json\"","v":{0,"name":""crucible"msg",:""level":30Created new region file \"/tmp/.tmp7h3aet/region.json\"","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.254220454Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"}time":
12135 "2023-09-22T23:21:36.254228573Z","{hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","Database read version 1"pid",:"4766v":0},
12136 "name":"crucible","level":30,"time":"2023-09-22T23:21:36.254268461Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
121372023-09-22T23:21:36.254ZINFOcrucible: Database write version 1
121382023-09-22T23:21:36.261ZINFOcrucible: UUID: aa7fa17b-6d35-4d22-b991-2042801319b4
121392023-09-22T23:21:36.261ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12140 {"msg":"UpstairsConnection { upstairs_id: 9a57314d-b0f0-4fbd-bf04-117b6fe34801, session_id: bcd80027-a5a2-4e00-8334-f6e5f90f3646, gen: 10 } is now active (read-write)","v":0,"name":"crucible","level":30Active Upstairs connections: [UpstairsConnection { upstairs_id: 9a57314d-b0f0-4fbd-bf04-117b6fe34801, session_id: bcd80027-a5a2-4e00-8334-f6e5f90f3646, gen: 10 }]
12141 ,"time":"2023-09-22T23:21:36.261851869Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766Crucible Downstairs work queue:
12142 }
12143 DSW:[1000] EClose New deps:[]
12144 DSW:[1001] EFClose New deps:[]
12145 DSW:[1002] ReOpen New deps:[JobId(1000)]
12146 DSW:[1003] ReOpen New deps:[JobId(1001)]
12147 Done tasks []
12148 last_flush: JobId(0)
12149 --------------------------------------
12150 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
121512023-09-22T23:21:36.263ZINFOcrucible: current number of open files limit 65536 is already the maximum
121522023-09-22T23:21:36.263ZINFOcrucible: Opened existing region file "/tmp/.tmp7h3aet/region.json"
121532023-09-22T23:21:36.263ZINFOcrucible: Database read version 1
121542023-09-22T23:21:36.263ZINFOcrucible: Database write version 1
12155 {test test::test_extent_new_close_flush_close ... "okmsg"
12156 :"UUID: 37d7ca80-1c95-44f8-b547-ec9cebe4faaf","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.267409026Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
121572023-09-22T23:21:36.267ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12158 Before doing work we have:
12159 Active Upstairs connections: [UpstairsConnection { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, gen: 10 }]
12160 Crucible Downstairs work queue:
12161 DSW:[1000] EClose New deps:[]
12162 DSW:[1001] EFClose New deps:[]
12163 DSW:[1002] Read New deps:[JobId(1000), JobId(1001)]
12164 DSW:[1003] NoOp New deps:[JobId(1000), JobId(1001), JobId(1002)]
12165 DSW:[1004] ReOpen New deps:[JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
12166 Done tasks []
12167 last_flush: JobId(0)
12168 --------------------------------------
12169 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)]
12170 Do IOop 1000
12171 {"msg":"UpstairsConnection { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, gen: 10 } is now active (read-write)","v":0,"name":"crucible","level":30{"msg",:""time":"2023-09-22T23:21:36.268199905Z"current number of open files limit 65536 is already the maximum,""hostname":","v":ip-10-150-1-55.us-west-2.compute.internal"0,","pid"name"::"4766crucible","}level"
12172 :30,"time":"2023-09-22T23:21:36.268243809Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
121732023-09-22T23:21:36.268ZINFOcrucible: Created new region file "/tmp/.tmpIBUDCt/region.json"
12174 Got m: ExtentLiveCloseAck { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, job_id: JobId(1000), result: Ok((0, 0, false)) }
12175 Do IOop 1001
12176 Got m: ExtentLiveCloseAck { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, job_id: JobId(1001), result: Ok((0, 0, false)) }
12177 Do IOop 1002
12178 Got m: ReadResponse { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, job_id: JobId(1002), responses: Ok([ReadResponse { eid: 2, offset: Block { value: 1, shift: 9 }, data: b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", block_contexts: [] }]) }
12179 Do IOop 1003
12180 Got m: ExtentLiveAckId { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, job_id: JobId(1003), result: Ok(()) }
12181 Do IOop 1004
12182 Got m: ExtentLiveAckId { upstairs_id: d247139f-5311-4ae1-914d-5f69c019b4de, session_id: b4d17432-7997-44b3-bf5b-8b1e5f7a26f7, job_id: JobId(1004), result: Ok(()) }
12183 test test::test_extent_simple_close_flush_close ... ok
121842023-09-22T23:21:36.273ZINFOcrucible: current number of open files limit 65536 is already the maximum
121852023-09-22T23:21:36.273ZINFOcrucible: Created new region file "/tmp/.tmpfXGQZN/region.json"
121862023-09-22T23:21:36.274ZINFOcrucible: current number of open files limit 65536 is already the maximum
121872023-09-22T23:21:36.274ZINFOcrucible: Opened existing region file "/tmp/.tmpIBUDCt/region.json"
121882023-09-22T23:21:36.274ZINFOcrucible: Database read version 1
121892023-09-22T23:21:36.274ZINFOcrucible: Database write version 1
121902023-09-22T23:21:36.277ZINFOcrucible: UUID: 0c6c1a78-03b9-4f93-b775-53b778c4926b
121912023-09-22T23:21:36.277ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12192 Active Upstairs connections: [UpstairsConnection { upstairs_id: 8f3031f5-83bf-4e47-9bd4-4c6a88b9030a, session_id: 2b6beb2e-82c4-45d6-8df8-ab94de799177, gen: 10 }]
12193 Crucible Downstairs work queue:
12194 DSW:[1000] Write New deps:[]
12195 DSW:[1001] EClose New deps:[JobId(1000)]
12196 Done tasks []
12197 last_flush: JobId(0)
12198 --------------------------------------
12199 Got new work: [JobId(1000), JobId(1001)]
122002023-09-22T23:21:36.278ZINFOcrucible: UpstairsConnection { upstairs_id: 8f3031f5-83bf-4e47-9bd4-4c6a88b9030a, session_id: 2b6beb2e-82c4-45d6-8df8-ab94de799177, gen: 10 } is now active (read-write)
122012023-09-22T23:21:36.280ZINFOcrucible: current number of open files limit 65536 is already the maximum
122022023-09-22T23:21:36.280ZINFOcrucible: Opened existing region file "/tmp/.tmpfXGQZN/region.json"
122032023-09-22T23:21:36.280ZINFOcrucible: Database read version 1
122042023-09-22T23:21:36.280ZINFOcrucible: Database write version 1
12205 test test::test_extent_write_close ... ok
122062023-09-22T23:21:36.282ZINFOcrucible: current number of open files limit 65536 is already the maximum
122072023-09-22T23:21:36.282ZINFOcrucible: Created new region file "/tmp/.tmpWqEyfg/region.json"
122082023-09-22T23:21:36.283ZINFOcrucible: UUID: a3036cd6-f3d8-4f1b-852c-75a6ecb2f8cf
122092023-09-22T23:21:36.283ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12210 Active Upstairs connections: [UpstairsConnection { upstairs_id: aa96e365-81ae-4658-ba33-355ec5724697, session_id: 92f7a2cb-754e-4338-852f-d402d37e960c, gen: 10 }]
12211 {"msg":"Crucible Downstairs work queue:
12212 UpstairsConnection { upstairs_id: aa96e365-81ae-4658-ba33-355ec5724697, session_id: 92f7a2cb-754e-4338-852f-d402d37e960c, gen: 10 } is now active (read-write)"DSW:[1000] Write New deps:[]
12213 ,"v"DSW:[1001] EFClose New deps:[JobId(1000)]
12214 :Done tasks []
12215 0last_flush: JobId(0)
12216 ,--------------------------------------
12217 "name":"crucible","level":30Got new work: [JobId(1000), JobId(1001)]
12218 ,"time":"2023-09-22T23:21:36.283648143Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12219 test test::test_extent_write_flush_close ... ok
122202023-09-22T23:21:36.288ZINFOcrucible: current number of open files limit 65536 is already the maximum
122212023-09-22T23:21:36.288ZINFOcrucible: Created new region file "/tmp/.tmp4ll9pw/region.json"
122222023-09-22T23:21:36.289ZINFOcrucible: current number of open files limit 65536 is already the maximum
122232023-09-22T23:21:36.289ZINFOcrucible: Opened existing region file "/tmp/.tmpWqEyfg/region.json"
122242023-09-22T23:21:36.289ZINFOcrucible: Database read version 1
122252023-09-22T23:21:36.289ZINFOcrucible: Database write version 1
122262023-09-22T23:21:36.291ZINFOcrucible: UUID: 97737660-1a46-4ea8-ab7d-8ff38e4e9cd9
122272023-09-22T23:21:36.291ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12228 {"msg":"UpstairsConnection { upstairs_id: 51fd8704-7826-4cb9-8726-3803fc9d81db, session_id: 8b3cee1a-cd10-48a6-ad9b-ec1df5477e78, gen: 10 } is now active (read-write)Active Upstairs connections: [UpstairsConnection { upstairs_id: 51fd8704-7826-4cb9-8726-3803fc9d81db, session_id: 8b3cee1a-cd10-48a6-ad9b-ec1df5477e78, gen: 10 }]
12229 ","v":0,"name":"crucible","level"Crucible Downstairs work queue:
12230 :30DSW:[1000] Write New deps:[]
12231 DSW:[1001] Flush New deps:[]
12232 DSW:[1002] Write New deps:[JobId(1000), JobId(1001)]
12233 ,"DSW:[1003] EClose New deps:[JobId(1000), JobId(1001), JobId(1002)]
12234 time":Done tasks []
12235 "last_flush: JobId(0)
12236 2023-09-22T23:21:36.292350278Z"--------------------------------------
12237 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12238 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
122392023-09-22T23:21:36.294ZINFOcrucible: current number of open files limit 65536 is already the maximum
122402023-09-22T23:21:36.294ZINFOcrucible: Opened existing region file "/tmp/.tmp4ll9pw/region.json"
122412023-09-22T23:21:36.294ZINFOcrucible: Database read version 1
122422023-09-22T23:21:36.294ZINFOcrucible: Database write version 1
12243 test test::test_extent_write_flush_write_close ... ok
122442023-09-22T23:21:36.297ZINFOcrucible: UUID: a4c27b10-4243-40e6-97b2-683044208503
122452023-09-22T23:21:36.297ZINFOcrucible: Blocks per extent:4 Total Extents: 5
122462023-09-22T23:21:36.297ZINFOcrucible: current number of open files limit 65536 is already the maximum
122472023-09-22T23:21:36.297ZINFOcrucible: Created new region file "/tmp/.tmpSwLMyT/region.json"
12248 {"msg":"UpstairsConnection { upstairs_id: 7c391f6f-c198-4917-aa35-1c75063f9878, session_id: 1635c1bd-824d-4dd9-9a89-8df7a5974793, gen: 10 } is now active (read-write)"Active Upstairs connections: [UpstairsConnection { upstairs_id: 7c391f6f-c198-4917-aa35-1c75063f9878, session_id: 1635c1bd-824d-4dd9-9a89-8df7a5974793, gen: 10 }]
12249 ,"v":0,"name":"crucible","level":30Crucible Downstairs work queue:
12250 DSW:[1000] Write New deps:[]
12251 DSW:[1001] Write New deps:[]
12252 DSW:[1002] EFClose New deps:[JobId(1000)]
12253 DSW:[1003] EClose New deps:[JobId(1001)]
12254 ,Done tasks []
12255 "last_flush: JobId(0)
12256 time":--------------------------------------
12257 "2023-09-22T23:21:36.29765977Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12258 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
122592023-09-22T23:21:36.301ZINFOcrucible: current number of open files limit 65536 is already the maximum
122602023-09-22T23:21:36.301ZINFOcrucible: Opened existing region file "/tmp/.tmpSwLMyT/region.json"
122612023-09-22T23:21:36.301ZINFOcrucible: Database read version 1
122622023-09-22T23:21:36.301ZINFOcrucible: Database write version 1
12263 test test::test_extent_write_write_flush_close ... ok
122642023-09-22T23:21:36.302ZINFOcrucible: UUID: bff87367-9361-47d4-a673-7bded39b1521
122652023-09-22T23:21:36.302ZINFOcrucible: Blocks per extent:4 Total Extents: 2
122662023-09-22T23:21:36.302ZINFOcrucible: current number of open files limit 65536 is already the maximum
122672023-09-22T23:21:36.302ZINFOcrucible: Created new region file "/tmp/.tmpeno1fe/region.json"
12268 test test::test_multiple_read_only_no_job_id_collision ... ok
122692023-09-22T23:21:36.305ZINFOcrucible: current number of open files limit 65536 is already the maximum
122702023-09-22T23:21:36.305ZINFOcrucible: Created new region file "/tmp/.tmp0Wl0vI/region.json"
122712023-09-22T23:21:36.306ZINFOcrucible: current number of open files limit 65536 is already the maximum
122722023-09-22T23:21:36.306ZINFOcrucible: Opened existing region file "/tmp/.tmpeno1fe/region.json"
122732023-09-22T23:21:36.306ZINFOcrucible: Database read version 1
122742023-09-22T23:21:36.306ZINFOcrucible: Database write version 1
122752023-09-22T23:21:36.307ZINFOcrucible: UUID: e8fcc6b0-1f98-4bfd-ab37-79f94974d36d
122762023-09-22T23:21:36.307ZINFOcrucible: Blocks per extent:4 Total Extents: 2
122772023-09-22T23:21:36.308ZINFOcrucible: current number of open files limit 65536 is already the maximum
122782023-09-22T23:21:36.308ZINFOcrucible: Opened existing region file "/tmp/.tmp0Wl0vI/region.json"
122792023-09-22T23:21:36.308ZINFOcrucible: Database read version 1
122802023-09-22T23:21:36.308ZINFOcrucible: Database write version 1
12281 test test::test_promote_to_active_multi_read_only_different_uuid ... ok
122822023-09-22T23:21:36.309ZINFOcrucible: UUID: 3b8ecbef-099a-4919-b60a-4233ef43e424
122832023-09-22T23:21:36.309ZINFOcrucible: Blocks per extent:4 Total Extents: 2
12284 {"msg":"{"msg":"current number of open files limit 65536 is already the maximum","v":Signaling to UpstairsConnection { upstairs_id: 6f6956d1-741a-416d-92f2-d3763223d0a0, session_id: 878abda3-5c53-4b83-a225-52cc439c2d50, gen: 1 } thread that UpstairsConnection { upstairs_id: 6f6956d1-741a-416d-92f2-d3763223d0a0, session_id: 5948108d-46c6-4e4a-8532-1ee9bc949c97, gen: 1 } is being promoted (read-only)"0,","name"v":":crucible0",","name"level":":crucible"30,"level":30,"time":,""time":"2023-09-22T23:21:36.310380396Z","2023-09-22T23:21:36.310378493Z"hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4766,"pid"}:
12285 4766}
122862023-09-22T23:21:36.310ZINFOcrucible: Created new region file "/tmp/.tmpq3Ts9a/region.json"
12287 test test::test_promote_to_active_multi_read_only_same_uuid ... ok
122882023-09-22T23:21:36.312ZINFOcrucible: current number of open files limit 65536 is already the maximum
122892023-09-22T23:21:36.312ZINFOcrucible: Created new region file "/tmp/.tmpb62GWu/region.json"
122902023-09-22T23:21:36.314ZINFOcrucible: current number of open files limit 65536 is already the maximum
122912023-09-22T23:21:36.314ZINFOcrucible: Opened existing region file "/tmp/.tmpq3Ts9a/region.json"
122922023-09-22T23:21:36.314ZINFOcrucible: Database read version 1
122932023-09-22T23:21:36.314ZINFOcrucible: Database write version 1
122942023-09-22T23:21:36.315ZINFOcrucible: UUID: 9178ef01-f71a-4dbd-8623-68423516cf03
122952023-09-22T23:21:36.315ZINFOcrucible: Blocks per extent:4 Total Extents: 2
12296 ds1: MutexGuard { value: Downstairs { region: Region { dir: "/tmp/.tmpq3Ts9a", def: RegionDefinition { block_size: 512, extent_size: Block { value: 4, shift: 9 }, extent_count: 2, uuid: 9178ef01-f71a-4dbd-8623-68423516cf03, encrypted: false, database_read_version: 1, database_write_version: 1 }, extents: [Mutex { data: Opened(Extent { number: 0, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 237 }, metadb: Connection { path: Some("/tmp/.tmpq3Ts9a/00/000/000.db") }, dirty_blocks: {} } } }) }, Mutex { data: Opened(Extent { number: 1, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 241 }, metadb: Connection { path: Some("/tmp/.tmpq3Ts9a/00/000/001.db") }, dirty_blocks: {} } } }) }], dirty_extents: {}, read_only: false, log: Logger() }, lossy: false, read_errors: false, write_errors: false, flush_errors: false, active_upstairs: {}, dss: DsStatOuter { ds_stat_wrap: Mutex { is_locked: false, has_waiters: false } }, read_only: false, encrypted: false, address: None, repair_address: None, log: Logger() }, mutex: Mutex { is_locked: true, has_waiters: false } }
12297 
12298 ds2: MutexGuard { value: Downstairs { region: Region { dir: "/tmp/.tmpq3Ts9a", def: RegionDefinition { block_size: 512, extent_size: Block { value: 4, shift: 9 }, extent_count: 2, uuid: 9178ef01-f71a-4dbd-8623-68423516cf03, encrypted: false, database_read_version: 1, database_write_version: 1 }, extents: [Mutex { data: Opened(Extent { number: 0, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 237 }, metadb: Connection { path: Some("/tmp/.tmpq3Ts9a/00/000/000.db") }, dirty_blocks: {} } } }) }, Mutex { data: Opened(Extent { number: 1, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 241 }, metadb: Connection { path: Some("/tmp/.tmpq3Ts9a/00/000/001.db") }, dirty_blocks: {} } } }) }], dirty_extents: {}, read_only: false, log: Logger() }, lossy: false, read_errors: false, write_errors: false, flush_errors: false, active_upstairs: {b76ea64e-5a32-4215-9476-0f488dbb3024: ActiveUpstairs { upstairs_connection: UpstairsConnection { upstairs_id: b76ea64e-5a32-4215-9476-0f488dbb3024, session_id: bf5fccf8-eef4-472f-8b71-fc1a31eaeb56, gen: 2 }, work: Mutex { is_locked: false, has_waiters: false }, terminate_sender: Sender { chan: Tx { inner: Chan { tx: Tx { block_tail: 0x5cc6a90, tail_position: 0 }, semaphore: Semaphore { semaphore: Semaphore { permits: 1 }, bound: 1 }, rx_waker: AtomicWaker, tx_count: 1, rx_fields: "..." } } } }}, dss: DsStatOuter { ds_stat_wrap: Mutex { is_locked: false, has_waiters: false } }, read_only: false, encrypted: false, address: None, repair_address: None, log: Logger() }, mutex: Mutex { is_locked: true, has_waiters: false } }
12299 
12300 {{""msg":"msg":"current number of open files limit 65536 is already the maximum"UpstairsConnection { upstairs_id: b76ea64e-5a32-4215-9476-0f488dbb3024, session_id: bf5fccf8-eef4-472f-8b71-fc1a31eaeb56, gen: 2 } is now active (read-write)","v":,"0v",":name":"0crucible",","name":"level"crucible":,"level"30:30,",time"":"time":"2023-09-22T23:21:36.316131565Z"2023-09-22T23:21:36.316132874Z","hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal":,"4766pid":4766}}
12301 
12302 {"msg":"{"msg":"Opened existing region file \"/tmp/.tmpb62GWu/region.json\"Attempting RW takeover from UpstairsConnection { upstairs_id: b76ea64e-5a32-4215-9476-0f488dbb3024, session_id: bf5fccf8-eef4-472f-8b71-fc1a31eaeb56, gen: 2 } to UpstairsConnection { upstairs_id: 97418758-0f11-4547-a9ef-1a43890b320e, session_id: 32fbdd10-df06-43fe-9e8d-757ae10c99ee, gen: 1 }"",",v":"0v":,"0name",":"name"crucible":",crucible"","level":30level":40,"time":"2023-09-22T23:21:36.316249928Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12303 ,"time":"2023-09-22T23:21:36.316263085Z"{,"hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":","pid":Database read version 1"4766,"v":}0
12304 ,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.316321294Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
123052023-09-22T23:21:36.316ZINFOcrucible: Database write version 1
12306 test test::test_promote_to_active_multi_read_write_different_uuid_lower_gen ... ok
123072023-09-22T23:21:36.317ZINFOcrucible: current number of open files limit 65536 is already the maximum
123082023-09-22T23:21:36.317ZINFOcrucible: Created new region file "/tmp/.tmpGrq4Il/region.json"
123092023-09-22T23:21:36.317ZINFOcrucible: UUID: 2e7ab951-802d-4955-bf93-556d497258b4
123102023-09-22T23:21:36.318ZINFOcrucible: Blocks per extent:4 Total Extents: 2
123112023-09-22T23:21:36.318ZINFOcrucible: UpstairsConnection { upstairs_id: ef548056-f8ca-406d-a2df-e9377f52dec3, session_id: b69f1e2d-5f26-44d8-8655-e0d1da715faf, gen: 1 } is now active (read-write)
123122023-09-22T23:21:36.318ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: ef548056-f8ca-406d-a2df-e9377f52dec3, session_id: b69f1e2d-5f26-44d8-8655-e0d1da715faf, gen: 1 } to UpstairsConnection { upstairs_id: 47fd0e40-3a11-4b94-a18d-1eab3b74989b, session_id: 54d92ad8-4da7-4b3a-8148-02f158e67cb2, gen: 1 }
12313 test test::test_promote_to_active_multi_read_write_different_uuid_same_gen ... ok
123142023-09-22T23:21:36.320ZINFOcrucible: current number of open files limit 65536 is already the maximum
123152023-09-22T23:21:36.320ZINFOcrucible: Created new region file "/tmp/.tmpeILeH1/region.json"
12316 Populated 10 extents by copying 51200 bytes (100 blocks)
12317 Populated 10 extents by copying 51200 bytes (100 blocks)
123182023-09-22T23:21:36.321ZINFOcrucible: current number of open files limit 65536 is already the maximum
123192023-09-22T23:21:36.321ZINFOcrucible: Opened existing region file "/tmp/.tmpGrq4Il/region.json"
123202023-09-22T23:21:36.321ZINFOcrucible: Database read version 1
123212023-09-22T23:21:36.321ZINFOcrucible: Database write version 1
123222023-09-22T23:21:36.323ZINFOcrucible: UUID: 2b71e199-4551-4f7e-a897-922324d9c9ed
123232023-09-22T23:21:36.323ZINFOcrucible: Blocks per extent:4 Total Extents: 2
123242023-09-22T23:21:36.323ZINFOcrucible: current number of open files limit 65536 is already the maximum
123252023-09-22T23:21:36.324ZINFOcrucible: Opened existing region file "/tmp/.tmpeILeH1/region.json"
123262023-09-22T23:21:36.324ZINFOcrucible: Database read version 1
12327 {"msg":"Database write version 1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.324149559Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",Export total_size: 51200 Extent size:5120 Total Extents:10
12328 "Exporting from start_block: 0 count:100
12329 pid":4766}
12330 Populated 10 extents by copying 51200 bytes (100 blocks)
123312023-09-22T23:21:36.324ZINFOcrucible: UpstairsConnection { upstairs_id: 13f002c8-20b2-457c-9af0-0f9b5a05056c, session_id: 77629114-556d-464c-ac8e-9bd127fa69bc, gen: 1 } is now active (read-write)
123322023-09-22T23:21:36.324ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 13f002c8-20b2-457c-9af0-0f9b5a05056c, session_id: 77629114-556d-464c-ac8e-9bd127fa69bc, gen: 1 } to UpstairsConnection { upstairs_id: 13f002c8-20b2-457c-9af0-0f9b5a05056c, session_id: 37e396c4-b9e3-49cb-9e72-4add2d50216e, gen: 2 }
123332023-09-22T23:21:36.324ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: 13f002c8-20b2-457c-9af0-0f9b5a05056c, session_id: 77629114-556d-464c-ac8e-9bd127fa69bc, gen: 1 } thread that UpstairsConnection { upstairs_id: 13f002c8-20b2-457c-9af0-0f9b5a05056c, session_id: 37e396c4-b9e3-49cb-9e72-4add2d50216e, gen: 2 } is being promoted (read-write)
123342023-09-22T23:21:36.324ZINFOcrucible: UpstairsConnection { upstairs_id: 13f002c8-20b2-457c-9af0-0f9b5a05056c, session_id: 37e396c4-b9e3-49cb-9e72-4add2d50216e, gen: 2 } is now active (read-write)
12335 Populated 11 extents by copying 51712 bytes (101 blocks)
12336 {"msg":"UUID: 9441f374-eacf-409a-80a9-79ff99067655","v":0,"name":"crucible","level":30test test::test_promote_to_active_multi_read_write_same_uuid_larger_gen ... ok
12337 ,"time":"2023-09-22T23:21:36.32529425Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
123382023-09-22T23:21:36.325ZINFOcrucible: Blocks per extent:4 Total Extents: 2
123392023-09-22T23:21:36.325ZINFOcrucible: UpstairsConnection { upstairs_id: 15a5e302-b08f-4d85-a556-c77589e5d674, session_id: 06087ed2-beaf-4761-b7d9-adc46411651a, gen: 1 } is now active (read-write)
123402023-09-22T23:21:36.325ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 15a5e302-b08f-4d85-a556-c77589e5d674, session_id: 06087ed2-beaf-4761-b7d9-adc46411651a, gen: 1 } to UpstairsConnection { upstairs_id: 15a5e302-b08f-4d85-a556-c77589e5d674, session_id: b1f64075-23f4-4c0a-8cdc-b0e5f86e9f7d, gen: 1 }
123412023-09-22T23:21:36.325ZINFOcrucible: current number of open files limit 65536 is already the maximum
123422023-09-22T23:21:36.326ZINFOcrucible: Created new region file "/tmp/.tmpVuBd7H/region.json"
12343 test test::test_promote_to_active_multi_read_write_same_uuid_same_gen ... ok
123442023-09-22T23:21:36.327ZINFOcrucible: current number of open files limit 65536 is already the maximum
123452023-09-22T23:21:36.327ZINFOcrucible: Created new region file "/tmp/.tmpb88AZ2/region.json"
12346 Export total_size: 51200 Extent size:5120 Total Extents:10
12347 Exporting from start_block: 0 count:100
12348 Export total_size: 56320 Extent size:5120 Total Extents:11
12349 Exporting from start_block: 0 count:101
123502023-09-22T23:21:36.329ZINFOcrucible: current number of open files limit 65536 is already the maximum
123512023-09-22T23:21:36.329ZINFOcrucible: Opened existing region file "/tmp/.tmpVuBd7H/region.json"
123522023-09-22T23:21:36.329ZINFOcrucible: Database read version 1
123532023-09-22T23:21:36.329ZINFOcrucible: Database write version 1
123542023-09-22T23:21:36.330ZINFOcrucible: UUID: 32f8c64d-d6f0-455d-a03e-b312f819d6a8
123552023-09-22T23:21:36.330ZINFOcrucible: Blocks per extent:4 Total Extents: 2
123562023-09-22T23:21:36.330ZINFOcrucible: current number of open files limit 65536 is already the maximum
123572023-09-22T23:21:36.330ZINFOcrucible: Opened existing region file "/tmp/.tmpb88AZ2/region.json"
123582023-09-22T23:21:36.330ZINFOcrucible: Database read version 1
123592023-09-22T23:21:36.330ZINFOcrucible: Database write version 1
12360 test test::test_promote_to_active_one_read_only ... ok
123612023-09-22T23:21:36.331ZINFOcrucible: UUID: b57ff436-f771-4756-ac0c-c6bb6dd7b2b6
123622023-09-22T23:21:36.331ZINFOcrucible: Blocks per extent:4 Total Extents: 2
123632023-09-22T23:21:36.332ZINFOcrucible: current number of open files limit 65536 is already the maximum
123642023-09-22T23:21:36.332ZINFOcrucible: Created new region file "/tmp/.tmphT0TAH/region.json"
123652023-09-22T23:21:36.332ZINFOcrucible: UpstairsConnection { upstairs_id: 6fed7906-30ce-4ec1-bab3-26584d5634d0, session_id: 2d3f5bce-a17c-4a79-bbc6-f9cad82074ad, gen: 1 } is now active (read-write)
12366 test test::test_promote_to_active_one_read_write ... ok
123672023-09-22T23:21:36.333ZINFOcrucible: current number of open files limit 65536 is already the maximum
123682023-09-22T23:21:36.333ZINFOcrucible: Created new region file "/tmp/.tmp6BWdqh/region.json"
12369 Read and wrote out 100 blocks
123702023-09-22T23:21:36.335ZINFOcrucible: current number of open files limit 65536 is already the maximum
123712023-09-22T23:21:36.335ZINFOcrucible: Opened existing region file "/tmp/.tmphT0TAH/region.json"
123722023-09-22T23:21:36.335ZINFOcrucible: Database read version 1
123732023-09-22T23:21:36.335ZINFOcrucible: Database write version 1
12374 {"msg":"UUID: fc483d0d-1ea7-4d46-a06e-c635454a441b","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.337797691Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":Active Upstairs connections: [UpstairsConnection { upstairs_id: 3b357141-7ef4-4ae1-8f8f-c6f0511eea53, session_id: 04a5e9c8-5fc7-455e-ba77-6099913b2300, gen: 10 }]
12375 Crucible Downstairs work queue:
12376 DSW:[1000] Read New deps:[]
12377 DSW:[1001] Read New deps:[JobId(1000)]
12378 Done tasks []
12379 last_flush: JobId(0)
12380 --------------------------------------
12381 Got new work: [JobId(1000), JobId(1001)]
12382 4766}
12383 Do IOop 1000
123842023-09-22T23:21:36.338ZINFOcrucible: Blocks per extent:4 Total Extents: 2
123852023-09-22T23:21:36.338ZINFOcrucible: UpstairsConnection { upstairs_id: 3b357141-7ef4-4ae1-8f8f-c6f0511eea53, session_id: 04a5e9c8-5fc7-455e-ba77-6099913b2300, gen: 10 } is now active (read-write)
12386 Got m: ReadResponse { upstairs_id: 3b357141-7ef4-4ae1-8f8f-c6f0511eea53, session_id: 04a5e9c8-5fc7-455e-ba77-6099913b2300, job_id: JobId(1000), responses: Ok([ReadResponse { eid: 0, offset: Block { value: 1, shift: 9 }, data: b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", block_contexts: [] }]) }
12387 Do IOop 1001
12388 Got m: ReadResponse { upstairs_id: 3b357141-7ef4-4ae1-8f8f-c6f0511eea53, session_id: 04a5e9c8-5fc7-455e-ba77-6099913b2300, job_id: JobId(1001), responses: Ok([ReadResponse { eid: 1, offset: Block { value: 1, shift: 9 }, data: b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", block_contexts: [] }]) }
12389 Active Upstairs connections: [UpstairsConnection { upstairs_id: 3b357141-7ef4-4ae1-8f8f-c6f0511eea53, session_id: 04a5e9c8-5fc7-455e-ba77-6099913b2300, gen: 10 }]
12390 Crucible Downstairs work queue: Empty
12391 Done tasks [JobId(1000), JobId(1001)]
12392 last_flush: JobId(0)
12393 --------------------------------------
12394 test test::import_test_too_small ... ok
123952023-09-22T23:21:36.338ZINFOcrucible: current number of open files limit 65536 is already the maximum
123962023-09-22T23:21:36.339ZINFOcrucible: Created new region file "/tmp/.tmpxwEdpG/region.json"
12397 Read and wrote out 101 blocks
12398 test test::test_simple_read ... ok
123992023-09-22T23:21:36.340ZINFOcrucible: current number of open files limit 65536 is already the maximum
124002023-09-22T23:21:36.340ZINFOcrucible: Created new region file "/tmp/.tmpZt7Eys/region.json"
124012023-09-22T23:21:36.341ZINFOcrucible: current number of open files limit 65536 is already the maximum
124022023-09-22T23:21:36.341ZINFOcrucible: Opened existing region file "/tmp/.tmp6BWdqh/region.json"
124032023-09-22T23:21:36.341ZINFOcrucible: Database read version 1
124042023-09-22T23:21:36.341ZINFOcrucible: Database write version 1
12405 Read and wrote out 100 blocks
12406 test test::import_test_basic_read_blocks ... ok
124072023-09-22T23:21:36.346ZINFOcrucible: current number of open files limit 65536 is already the maximum
124082023-09-22T23:21:36.346ZINFOcrucible: Created new region file "/tmp/.tmppimBq2/region.json"
124092023-09-22T23:21:36.347ZINFOcrucible: UUID: 81def555-b192-40a0-acf4-3ddea77e2c6d
124102023-09-22T23:21:36.347ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12411 test test::import_test_too_large ... ok
124122023-09-22T23:21:36.348ZINFOcrucible: current number of open files limit 65536 is already the maximum
124132023-09-22T23:21:36.348ZINFOcrucible: Created new region file "/tmp/.tmpo0lONP/region.json"
124142023-09-22T23:21:36.348ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
124152023-09-22T23:21:36.348ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
124162023-09-22T23:21:36.348ZINFOcrucible: Using address: 127.0.0.1:5557 task = main
124172023-09-22T23:21:36.349ZINFOcrucible: current number of open files limit 65536 is already the maximum
124182023-09-22T23:21:36.349ZINFOcrucible: Opened existing region file "/tmp/.tmpZt7Eys/region.json"
124192023-09-22T23:21:36.349ZINFOcrucible: Database read version 1
124202023-09-22T23:21:36.349ZINFOcrucible: Database write version 1
12421 {{"msg"":"msg":"current number of open files limit 65536 is already the maximum"Repair listens on 127.0.0.1:5558","v":,"0v,"":name":"0crucible,""name":,""crucible"level",":level":3030,"time":"2023-09-22T23:21:36.349615303Z",","hostname":time":""2023-09-22T23:21:36.349616017Z"ip-10-150-1-55.us-west-2.compute.internal",,""pid"hostname"::"4766}
12422 ip-10-150-1-55.us-west-2.compute.internal","pid":4766{,"task":""repair"msg":"}
12423 Opened existing region file \"/tmp/.tmpxwEdpG/region.json\"","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.349673042Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
124242023-09-22T23:21:36.349ZINFOcrucible: Database read version 1
124252023-09-22T23:21:36.349ZINFOcrucible: Database write version 1
12426 {"msg":"listening","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.350680455Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766test test::import_test_basic ... ok
12427 ,"local_addr":"127.0.0.1:5558","task":"repair"}
124282023-09-22T23:21:36.350ZINFOcrucible: Using repair address: 127.0.0.1:5558 task = main
124292023-09-22T23:21:36.350ZINFOcrucible: No SSL acceptor configured task = main
124302023-09-22T23:21:36.351ZINFOcrucible: listening on 127.0.0.1:5557 task = main
124312023-09-22T23:21:36.352ZINFOcrucible: accepted connection from 127.0.0.1:52256 task = main
124322023-09-22T23:21:36.352ZINFOcrucible: Connection request from 15649826-de3f-4a94-ae54-5732918c3786 with version 3 task = proc
12433 {"msg":"UUID: 1b8d930c-af92-4946-b813-90e44a027831","v":{0,"name":"crucible","level":"30msg":"UUID: aad39680-8265-40f5-8f31-e66186f0fe36","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.35305682Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
12434 ,"time":"{2023-09-22T23:21:36.353068201Z",""hostname"msg:"":"ip-10-150-1-55.us-west-2.compute.internal"Blocks per extent:4 Total Extents: 5",",pid"":v":47660,"name":"}crucible"
12435 ,"level":30,"time":"2023-09-22T23:21:36.35311508Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766}
124362023-09-22T23:21:36.353ZINFOcrucible: Blocks per extent:4 Total Extents: 5
124372023-09-22T23:21:36.353ZINFOcrucible: current number of open files limit 65536 is already the maximum
124382023-09-22T23:21:36.353ZINFOcrucible: Opened existing region file "/tmp/.tmppimBq2/region.json"
124392023-09-22T23:21:36.353ZINFOcrucible: Database read version 1
124402023-09-22T23:21:36.353ZINFOcrucible: Database write version 1
12441 {"{msg":""msg":"connection (127.0.0.1:52256) Exits with error: Required version 4, Or [3] got 3"Crucible Version: Crucible Version: 0.0.1\n,"v":0Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46\n,"name":"crucible"Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main\n,"level":50rustc: 1.70.0 stable x86_64-unknown-illumos\nCargo: x86_64-unknown-illumos Debug: true Opt level: 0","v":0,"name":"crucible","level":30,","time"time":":"2023-09-22T23:21:36.353659199Z"2023-09-22T23:21:36.353651304Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid"::47664766,"}task":
12442 "main"}
124432023-09-22T23:21:36.353ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
124442023-09-22T23:21:36.353ZINFOcrucible: Using address: 127.0.0.1:5555 task = main
124452023-09-22T23:21:36.353ZINFOcrucible: Repair listens on 127.0.0.1:5556 task = repair
124462023-09-22T23:21:36.354ZINFOcrucible: listening local_addr = 127.0.0.1:5556 task = repair
12447 {"msg":"1002 job Read for connection UpstairsConnection { upstairs_id: e49f863f-8242-419c-9497-aa73fb0f3913, session_id: 8c10f889-cfe9-4164-a34e-713141a58437, gen: 0 } waiting on 1 deps","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:21:36.354292961Z","hostname"{:"ip-10-150-1-55.us-west-2.compute.internal","pid"":msg4766":"}
12448 Using repair address: 127.0.0.1:5556","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.354342009Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
124492023-09-22T23:21:36.354ZINFOcrucible: No SSL acceptor configured task = main
124502023-09-22T23:21:36.354ZINFOcrucible: listening on 127.0.0.1:5555 task = main
124512023-09-22T23:21:36.354ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
124522023-09-22T23:21:36.354ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
124532023-09-22T23:21:36.354ZINFOcrucible: Using address: 127.0.0.1:5561 task = main
124542023-09-22T23:21:36.355ZINFOcrucible: Repair listens on 127.0.0.1:5562 task = repair
12455 {{""msg":"msg"listening":","v":accepted connection from 127.0.0.1:52476"0,"name,"":v"":crucible"0,","level"name"::"30crucible","level":30,"time":"2023-09-22T23:21:36.355188321Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time,"":pid"":47662023-09-22T23:21:36.355191388Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""pid":local_addr"4766:"127.0.0.1:5562,""task",":"task"main":"}repair"
12456 }
12457 {"msg":"Using repair address: 127.0.0.1:5562","v":{0,"name":"crucible","level":"30msg":"Connection request from acb09623-8b52-40d9-9ec9-f8e8a0e2acbd with version 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.35540462Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main","}time":
12458 {"msg":"No SSL acceptor configured"","v":0,"2023-09-22T23:21:36.355416481Z"name":"crucible",",level"":hostname"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"proc"}
12459 ,"time":"2023-09-22T23:21:36.355472077Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
12460 {{"msg":"listening on 127.0.0.1:5561",""v":msg"0:","name":"crucible","level":30upstairs UpstairsConnection { upstairs_id: acb09623-8b52-40d9-9ec9-f8e8a0e2acbd, session_id: 88ebf34f-49b9-432d-8ca9-0cbead928022, gen: 1 } connected, version 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.355537457Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
12461 ,"time":"2023-09-22T23:21:36.355549861Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"proc"}
124622023-09-22T23:21:36.355ZWARNcrucible: 2002 job Flush for connection UpstairsConnection { upstairs_id: e49f863f-8242-419c-9497-aa73fb0f3913, session_id: 8c10f889-cfe9-4164-a34e-713141a58437, gen: 0 } waiting on 1 deps
124632023-09-22T23:21:36.355ZINFOcrucible: accepted connection from 127.0.0.1:35336 task = main
124642023-09-22T23:21:36.356ZINFOcrucible: Connection request from 7c6cc61c-e084-46c4-9a6b-2d866e33dfac with version 5 task = proc
124652023-09-22T23:21:36.356ZWARNcrucible: downstairs and upstairs using different but compatible versions, Upstairs is 5, but supports [4, 5], downstairs is 4 task = proc
124662023-09-22T23:21:36.356ZINFOcrucible: upstairs UpstairsConnection { upstairs_id: 7c6cc61c-e084-46c4-9a6b-2d866e33dfac, session_id: 1c566281-ee17-4f04-ac33-90bea0b26a2e, gen: 1 } connected, version 4 task = proc
124672023-09-22T23:21:36.356ZINFOcrucible: current number of open files limit 65536 is already the maximum
124682023-09-22T23:21:36.356ZINFOcrucible: Opened existing region file "/tmp/.tmpo0lONP/region.json"
124692023-09-22T23:21:36.356ZINFOcrucible: Database read version 1
124702023-09-22T23:21:36.356ZINFOcrucible: Database write version 1
12471 test test::test_version_downrev ... ok
12472 test test::two_job_chains ... ok
12473 test test::test_version_match ... ok
12474 test test::unblock_job ... ok
124752023-09-22T23:21:36.360ZWARNcrucible: 1002 job Read for connection UpstairsConnection { upstairs_id: d6889a6f-b791-4b23-96b2-7bad3c7074ce, session_id: 86f013ed-9310-4dfe-9691-063aa4bd272f, gen: 0 } waiting on 1 deps
12476 test test::test_version_uprev_compatable ... ok
124772023-09-22T23:21:36.360ZINFOcrucible: UUID: f0e1cc08-496b-4df1-af7b-29dd28d285c9
124782023-09-22T23:21:36.360ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12479 test test::unblock_job_chain ... ok
124802023-09-22T23:21:36.361ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
124812023-09-22T23:21:36.361ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
12482 {"msg":"Using address: 127.0.0.1:5563","v":0,"name":"crucible","level":30test test::you_had_one_job ... ,ok"
12483 time":"2023-09-22T23:21:36.361423871Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
124842023-09-22T23:21:36.361ZINFOcrucible: UUID: 7c42c1a4-22d9-45fd-a826-47736cc8907f
124852023-09-22T23:21:36.361ZINFOcrucible: Blocks per extent:4 Total Extents: 5
124862023-09-22T23:21:36.361ZINFOcrucible: Repair listens on 127.0.0.1:5564 task = repair
124872023-09-22T23:21:36.361ZINFOcrucible: listening local_addr = 127.0.0.1:5564 task = repair
12488 {"msg":"1002 job Read for connection UpstairsConnection { upstairs_id: 0a96c1e0-3b8f-4a41-bdd0-0ac673cd2e7d, session_id: 013c3911-e491-4601-bc22-64918fd8155c, gen: 0 } waiting on 1 deps","v":0,"name":"crucible","level":40{"msg":"1002 job Read for connection UpstairsConnection { upstairs_id: b1435588-acb5-43f3-9d5b-5b81cde60797, session_id: f9e1212a-a737-4a66-baef-4fd64a6f829d, gen: 0 } waiting on 1 deps","v":,"0time":","name"2023-09-22T23:21:36.361809808Z":","crucible"hostname":","level"ip-10-150-1-55.us-west-2.compute.internal":,"pid":404766}
12489 {"msg":"Using repair address: 127.0.0.1:5564","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.361841448Z",","time"hostname"::""2023-09-22T23:21:36.361903423Z"ip-10-150-1-55.us-west-2.compute.internal",","hostname"pid"::"4766ip-10-150-1-55.us-west-2.compute.internal"},"
12490 pid":4766,"task":"main"}
124912023-09-22T23:21:36.361ZINFOcrucible: No SSL acceptor configured task = main
124922023-09-22T23:21:36.362ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
12493 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":{0,"name":"crucible","level":"30msg":"listening on 127.0.0.1:5563","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.362176502Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
12494 ,"time":"{2023-09-22T23:21:36.362187052Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal"Using address: 127.0.0.1:5579,"","pidv""::04766,"name":,""crucible"task",:""main"level":}30
12495 ,"time":"2023-09-22T23:21:36.362228048Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
124962023-09-22T23:21:36.362ZWARNcrucible: 1002 job Flush for connection UpstairsConnection { upstairs_id: a6691f57-e53e-48bb-8d47-8d44e03a96a3, session_id: 58dea043-607b-4e68-9c88-b6528c30ae9e, gen: 0 } waiting on 1 deps
124972023-09-22T23:21:36.362ZINFOcrucible: Repair listens on 127.0.0.1:5560 task = repair
12498 test test::unblock_job_chain_second_is_flush ... {ok
12499 "msg":"accepted connection from 127.0.0.1:37397","v":0,"{name":"crucible","level"":msg"30:"listening","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:36.362531869Z",","time":hostname":""2023-09-22T23:21:36.362538592Z"ip-10-150-1-55.us-west-2.compute.internal,"","hostname"pid:"":4766ip-10-150-1-55.us-west-2.compute.internal,"","taskpid""::4766"main"}
12500 ,"local_addr":"127.0.0.1:5560","task":"repair"}
125012023-09-22T23:21:36.362ZINFOcrucible: Connection request from 5c71e2d7-67fc-4daf-b682-2fd653bbbf4e with version 8 task = proc
12502 {"msg":"{Using repair address: 127.0.0.1:5560","v":0,"name":""crucible"msg",":"level":30downstairs and upstairs using different but compatible versions, Upstairs is 8, but supports [3, 4, 5], downstairs is 4","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:21:36.362763698Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
12503 {"msg":",No SSL acceptor configured"","timev""::"0,"2023-09-22T23:21:36.362773566Z"name":"crucible",","hostname"level":":30ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"proc"}
12504 {"msg":","time":"2023-09-22T23:21:36.36291573Z","hostname":upstairs UpstairsConnection { upstairs_id: 5c71e2d7-67fc-4daf-b682-2fd653bbbf4e, session_id: 623344f4-c90d-4a6f-92d6-8f741bd9fc20, gen: 1 } connected, version 4"","v":ip-10-150-1-55.us-west-2.compute.internal"0,","pid"name"::4766"crucible",","task"level"::"30main"}
125052023-09-22T23:21:36.362ZINFOcrucible: listening on 127.0.0.1:5579 task = proc
12506 ,"time":"2023-09-22T23:21:36.362974335Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4766,"task":"main"}
12507 test test::unblock_job_chain_first_is_flush ... ok
125082023-09-22T23:21:36.363ZINFOcrucible: accepted connection from 127.0.0.1:40997 task = main
125092023-09-22T23:21:36.363ZINFOcrucible: Connection request from 77586f62-9e02-449a-b289-31377afff630 with version 5 task = proc
125102023-09-22T23:21:36.363ZERROcrucible: connection (127.0.0.1:40997) Exits with error: Required version 4, Or [5] got 5
12511 test test::unblock_job_upstairs_sends_big_deps ... ok
12512 test test::test_version_uprev_only ... ok
12513 test test::test_version_uprev_list ... ok
12514 test region::test::test_flush_after_multiple_disjoint_writes ... ok
12515 test region::test::test_big_extent_full_write_and_flush ... ok
12516 
12517 test result: ok. 156 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 1.49s
12518 
12519 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_downstairs-3ed9735920c1592d --nocapture`
12520 
12521 running 0 tests
12522 
12523 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
12524 
12525 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_hammer-9622fb9be260fb45 --nocapture`
12526 
12527 running 0 tests
12528 
12529 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
12530 
12531 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_integration_tests-8902d603847d3610 --nocapture`
12532 
12533 running 57 tests
12534 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12535 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12536 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12537 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12538 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12539 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12540 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12541 Sep 22 23:21:37.635 INFO current number of open files limit 65536 is already the maximum
12542 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-hyFqsHgE/region.json"
12543 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-7jreVyVE/region.json"
12544 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-fNrmxQUi/region.json"
12545 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-tEDzCIA0/region.json"
12546 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-MOi2zaso/region.json"
12547 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-a8oaK2eo/region.json"
12548 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-qaZTfu42/region.json"
12549 Sep 22 23:21:37.639 INFO Created new region file "/tmp/downstairs-kwQdCZxu/region.json"
12550 Sep 22 23:21:37.652 INFO current number of open files limit 65536 is already the maximum
12551 Sep 22 23:21:37.652 INFO Opened existing region file "/tmp/downstairs-hyFqsHgE/region.json"
12552 Sep 22 23:21:37.652 INFO Database read version 1
12553 Sep 22 23:21:37.652 INFO Database write version 1
12554 Sep 22 23:21:37.653 INFO current number of open files limit 65536 is already the maximum
12555 Sep 22 23:21:37.653 INFO Opened existing region file "/tmp/downstairs-fNrmxQUi/region.json"
12556 Sep 22 23:21:37.653 INFO Database read version 1
12557 Sep 22 23:21:37.653 INFO Database write version 1
12558 Sep 22 23:21:37.654 INFO current number of open files limit 65536 is already the maximum
12559 Sep 22 23:21:37.654 INFO Opened existing region file "/tmp/downstairs-7jreVyVE/region.json"
12560 Sep 22 23:21:37.654 INFO Database read version 1
12561 Sep 22 23:21:37.654 INFO Database write version 1
12562 Sep 22 23:21:37.655 INFO current number of open files limit 65536 is already the maximum
12563 Sep 22 23:21:37.655 INFO Opened existing region file "/tmp/downstairs-a8oaK2eo/region.json"
12564 Sep 22 23:21:37.655 INFO Database read version 1
12565 Sep 22 23:21:37.655 INFO Database write version 1
12566 Sep 22 23:21:37.655 INFO current number of open files limit 65536 is already the maximum
12567 Sep 22 23:21:37.655 INFO current number of open files limit 65536 is already the maximum
12568 Sep 22 23:21:37.655 INFO Opened existing region file "/tmp/downstairs-kwQdCZxu/region.json"
12569 Sep 22 23:21:37.655 INFO Opened existing region file "/tmp/downstairs-qaZTfu42/region.json"
12570 Sep 22 23:21:37.655 INFO Database read version 1
12571 Sep 22 23:21:37.655 INFO Database read version 1
12572 Sep 22 23:21:37.655 INFO Database write version 1
12573 Sep 22 23:21:37.655 INFO Database write version 1
12574 Sep 22 23:21:37.655 INFO current number of open files limit 65536 is already the maximum
12575 Sep 22 23:21:37.655 INFO Opened existing region file "/tmp/downstairs-MOi2zaso/region.json"
12576 Sep 22 23:21:37.655 INFO Database read version 1
12577 Sep 22 23:21:37.655 INFO Database write version 1
12578 Sep 22 23:21:37.656 INFO current number of open files limit 65536 is already the maximum
12579 Sep 22 23:21:37.656 INFO Opened existing region file "/tmp/downstairs-tEDzCIA0/region.json"
12580 Sep 22 23:21:37.656 INFO Database read version 1
12581 Sep 22 23:21:37.656 INFO Database write version 1
12582 Sep 22 23:21:37.658 INFO UUID: f2d164d9-9cbb-47eb-a838-b7f17a4d0411
12583 Sep 22 23:21:37.658 INFO Blocks per extent:5 Total Extents: 2
12584 Sep 22 23:21:37.658 INFO Crucible Version: Crucible Version: 0.0.1
12585 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12586 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12587 rustc: 1.70.0 stable x86_64-unknown-illumos
12588 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12589 Sep 22 23:21:37.659 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12590 Sep 22 23:21:37.659 INFO Using address: 127.0.0.1:54884, task: main
12591 Sep 22 23:21:37.661 INFO Repair listens on 127.0.0.1:0, task: repair
12592 Sep 22 23:21:37.661 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61071, task: repair
12593 Sep 22 23:21:37.661 INFO UUID: ea1b6b61-6132-4ae7-8fec-05be62de606d
12594 Sep 22 23:21:37.661 INFO Blocks per extent:5 Total Extents: 2
12595 Sep 22 23:21:37.661 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61071, task: repair
12596 Sep 22 23:21:37.661 INFO Crucible Version: Crucible Version: 0.0.1
12597 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12598 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12599 rustc: 1.70.0 stable x86_64-unknown-illumos
12600 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12601 Sep 22 23:21:37.661 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12602 Sep 22 23:21:37.661 INFO Using address: 127.0.0.1:39150, task: main
12603 Sep 22 23:21:37.661 INFO listening, local_addr: 127.0.0.1:61071, task: repair
12604 Sep 22 23:21:37.662 INFO Repair listens on 127.0.0.1:0, task: repair
12605 Sep 22 23:21:37.662 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50517, task: repair
12606 Sep 22 23:21:37.662 INFO UUID: ca34832d-be06-44a7-9364-f6c3bc44d7fb
12607 Sep 22 23:21:37.662 INFO Blocks per extent:5 Total Extents: 2
12608 Sep 22 23:21:37.662 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50517, task: repair
12609 Sep 22 23:21:37.662 INFO listening, local_addr: 127.0.0.1:50517, task: repair
12610 Sep 22 23:21:37.662 INFO Crucible Version: Crucible Version: 0.0.1
12611 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12612 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12613 rustc: 1.70.0 stable x86_64-unknown-illumos
12614 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12615 Sep 22 23:21:37.662 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12616 Sep 22 23:21:37.662 INFO Using address: 127.0.0.1:46295, task: main
12617 Sep 22 23:21:37.662 INFO UUID: f652e01a-1c15-4c9f-9d6f-06b9109bda2e
12618 Sep 22 23:21:37.662 INFO Blocks per extent:5 Total Extents: 2
12619 Sep 22 23:21:37.662 INFO Crucible Version: Crucible Version: 0.0.1
12620 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12621 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12622 rustc: 1.70.0 stable x86_64-unknown-illumos
12623 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12624 Sep 22 23:21:37.662 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12625 Sep 22 23:21:37.662 INFO Using address: 127.0.0.1:63497, task: main
12626 Sep 22 23:21:37.662 INFO Repair listens on 127.0.0.1:0, task: repair
12627 Sep 22 23:21:37.663 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58361, task: repair
12628 Sep 22 23:21:37.663 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58361, task: repair
12629 Sep 22 23:21:37.663 INFO listening, local_addr: 127.0.0.1:58361, task: repair
12630 Sep 22 23:21:37.663 INFO UUID: b778f7e5-519c-44d5-90ea-a92dbeffe813
12631 Sep 22 23:21:37.663 INFO Blocks per extent:5 Total Extents: 2
12632 Sep 22 23:21:37.663 INFO Crucible Version: Crucible Version: 0.0.1
12633 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12634 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12635 rustc: 1.70.0 stable x86_64-unknown-illumos
12636 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12637 Sep 22 23:21:37.663 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12638 Sep 22 23:21:37.663 INFO Using address: 127.0.0.1:36647, task: main
12639 Sep 22 23:21:37.663 INFO UUID: a4638f9d-a209-4281-803d-046dfafa8f18
12640 Sep 22 23:21:37.663 INFO Blocks per extent:5 Total Extents: 2
12641 Sep 22 23:21:37.663 INFO Repair listens on 127.0.0.1:0, task: repair
12642 Sep 22 23:21:37.663 INFO UUID: ff162497-e5a3-4763-b129-29dec4869786
12643 Sep 22 23:21:37.663 INFO Blocks per extent:5 Total Extents: 2
12644 Sep 22 23:21:37.663 INFO Crucible Version: Crucible Version: 0.0.1
12645 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12646 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12647 rustc: 1.70.0 stable x86_64-unknown-illumos
12648 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12649 Sep 22 23:21:37.663 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12650 Sep 22 23:21:37.663 INFO Using address: 127.0.0.1:34554, task: main
12651 Sep 22 23:21:37.663 INFO UUID: 2ad3fd00-b9f7-450b-935d-a7c44c290e9e
12652 Sep 22 23:21:37.663 INFO Blocks per extent:5 Total Extents: 2
12653 Sep 22 23:21:37.663 INFO Crucible Version: Crucible Version: 0.0.1
12654 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12655 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12656 rustc: 1.70.0 stable x86_64-unknown-illumos
12657 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12658 Sep 22 23:21:37.663 INFO Crucible Version: Crucible Version: 0.0.1
12659 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12660 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12661 rustc: 1.70.0 stable x86_64-unknown-illumos
12662 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12663 Sep 22 23:21:37.664 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12664 Sep 22 23:21:37.664 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12665 Sep 22 23:21:37.664 INFO Using address: 127.0.0.1:46967, task: main
12666 Sep 22 23:21:37.664 INFO Repair listens on 127.0.0.1:0, task: repair
12667 Sep 22 23:21:37.664 INFO Using address: 127.0.0.1:43936, task: main
12668 Sep 22 23:21:37.664 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39836, task: repair
12669 Sep 22 23:21:37.664 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39836, task: repair
12670 Sep 22 23:21:37.664 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46776, task: repair
12671 Sep 22 23:21:37.664 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46776, task: repair
12672 Sep 22 23:21:37.664 INFO listening, local_addr: 127.0.0.1:39836, task: repair
12673 Sep 22 23:21:37.664 INFO listening, local_addr: 127.0.0.1:46776, task: repair
12674 Sep 22 23:21:37.664 INFO Repair listens on 127.0.0.1:0, task: repair
12675 Sep 22 23:21:37.664 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64111, task: repair
12676 Sep 22 23:21:37.664 INFO Repair listens on 127.0.0.1:0, task: repair
12677 Sep 22 23:21:37.664 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64111, task: repair
12678 Sep 22 23:21:37.664 INFO listening, local_addr: 127.0.0.1:64111, task: repair
12679 Sep 22 23:21:37.664 INFO Repair listens on 127.0.0.1:0, task: repair
12680 Sep 22 23:21:37.664 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38438, task: repair
12681 Sep 22 23:21:37.664 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38438, task: repair
12682 Sep 22 23:21:37.664 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51241, task: repair
12683 Sep 22 23:21:37.664 INFO listening, local_addr: 127.0.0.1:38438, task: repair
12684 Sep 22 23:21:37.664 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51241, task: repair
12685 Sep 22 23:21:37.664 INFO listening, local_addr: 127.0.0.1:51241, task: repair
12686 Sep 22 23:21:37.667 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51241, task: repair
12687 Sep 22 23:21:37.667 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38438, task: repair
12688 Sep 22 23:21:37.667 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46776, task: repair
12689 Sep 22 23:21:37.667 INFO Using repair address: 127.0.0.1:51241, task: main
12690 Sep 22 23:21:37.667 INFO Using repair address: 127.0.0.1:38438, task: main
12691 Sep 22 23:21:37.667 INFO Using repair address: 127.0.0.1:46776, task: main
12692 Sep 22 23:21:37.667 INFO No SSL acceptor configured, task: main
12693 Sep 22 23:21:37.667 INFO No SSL acceptor configured, task: main
12694 Sep 22 23:21:37.667 INFO No SSL acceptor configured, task: main
12695 Sep 22 23:21:37.667 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58361, task: repair
12696 Sep 22 23:21:37.667 INFO Using repair address: 127.0.0.1:58361, task: main
12697 Sep 22 23:21:37.667 INFO No SSL acceptor configured, task: main
12698 Sep 22 23:21:37.667 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61071, task: repair
12699 Sep 22 23:21:37.667 INFO Using repair address: 127.0.0.1:61071, task: main
12700 Sep 22 23:21:37.667 INFO No SSL acceptor configured, task: main
12701 Sep 22 23:21:37.668 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50517, task: repair
12702 Sep 22 23:21:37.668 INFO Using repair address: 127.0.0.1:50517, task: main
12703 Sep 22 23:21:37.668 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64111, task: repair
12704 Sep 22 23:21:37.668 INFO No SSL acceptor configured, task: main
12705 Sep 22 23:21:37.668 INFO Using repair address: 127.0.0.1:64111, task: main
12706 Sep 22 23:21:37.668 INFO No SSL acceptor configured, task: main
12707 Sep 22 23:21:37.668 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39836, task: repair
12708 Sep 22 23:21:37.668 INFO Using repair address: 127.0.0.1:39836, task: main
12709 Sep 22 23:21:37.668 INFO No SSL acceptor configured, task: main
12710 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12711 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12712 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12713 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12714 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-pWkbItfq/region.json"
12715 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-75rNZW4A/region.json"
12716 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-n0vNnrMi/region.json"
12717 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-Nlgr6SUH/region.json"
12718 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12719 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12720 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12721 Sep 22 23:21:37.668 INFO current number of open files limit 65536 is already the maximum
12722 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-k8ooJOk2/region.json"
12723 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-TZ4p57jk/region.json"
12724 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-jXVezaZ4/region.json"
12725 Sep 22 23:21:37.668 INFO Created new region file "/tmp/downstairs-CZzWC74T/region.json"
12726 Sep 22 23:21:37.672 INFO current number of open files limit 65536 is already the maximum
12727 Sep 22 23:21:37.672 INFO Opened existing region file "/tmp/downstairs-pWkbItfq/region.json"
12728 Sep 22 23:21:37.672 INFO Database read version 1
12729 Sep 22 23:21:37.672 INFO Database write version 1
12730 Sep 22 23:21:37.673 INFO current number of open files limit 65536 is already the maximum
12731 Sep 22 23:21:37.673 INFO Opened existing region file "/tmp/downstairs-n0vNnrMi/region.json"
12732 Sep 22 23:21:37.673 INFO Database read version 1
12733 Sep 22 23:21:37.673 INFO Database write version 1
12734 Sep 22 23:21:37.674 INFO current number of open files limit 65536 is already the maximum
12735 Sep 22 23:21:37.674 INFO Opened existing region file "/tmp/downstairs-Nlgr6SUH/region.json"
12736 Sep 22 23:21:37.674 INFO Database read version 1
12737 Sep 22 23:21:37.674 INFO Database write version 1
12738 Sep 22 23:21:37.674 INFO current number of open files limit 65536 is already the maximum
12739 Sep 22 23:21:37.674 INFO Opened existing region file "/tmp/downstairs-k8ooJOk2/region.json"
12740 Sep 22 23:21:37.674 INFO Database read version 1
12741 Sep 22 23:21:37.674 INFO Database write version 1
12742 Sep 22 23:21:37.674 INFO current number of open files limit 65536 is already the maximum
12743 Sep 22 23:21:37.674 INFO Opened existing region file "/tmp/downstairs-75rNZW4A/region.json"
12744 Sep 22 23:21:37.674 INFO Database read version 1
12745 Sep 22 23:21:37.674 INFO Database write version 1
12746 Sep 22 23:21:37.675 INFO UUID: 4e7888c2-7beb-4f8a-a2f2-dd1592f0ed78
12747 Sep 22 23:21:37.675 INFO Blocks per extent:5 Total Extents: 2
12748 Sep 22 23:21:37.675 INFO Crucible Version: Crucible Version: 0.0.1
12749 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12750 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12751 rustc: 1.70.0 stable x86_64-unknown-illumos
12752 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12753 Sep 22 23:21:37.675 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12754 Sep 22 23:21:37.675 INFO Using address: 127.0.0.1:40751, task: main
12755 Sep 22 23:21:37.675 INFO Repair listens on 127.0.0.1:0, task: repair
12756 Sep 22 23:21:37.676 INFO current number of open files limit 65536 is already the maximum
12757 Sep 22 23:21:37.676 INFO current number of open files limit 65536 is already the maximum
12758 Sep 22 23:21:37.676 INFO Opened existing region file "/tmp/downstairs-TZ4p57jk/region.json"
12759 Sep 22 23:21:37.676 INFO Opened existing region file "/tmp/downstairs-jXVezaZ4/region.json"
12760 Sep 22 23:21:37.676 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52716, task: repair
12761 Sep 22 23:21:37.676 INFO Database read version 1
12762 Sep 22 23:21:37.676 INFO Database read version 1
12763 Sep 22 23:21:37.676 INFO Database write version 1
12764 Sep 22 23:21:37.676 INFO Database write version 1
12765 Sep 22 23:21:37.676 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52716, task: repair
12766 Sep 22 23:21:37.676 INFO current number of open files limit 65536 is already the maximum
12767 Sep 22 23:21:37.676 INFO listening, local_addr: 127.0.0.1:52716, task: repair
12768 Sep 22 23:21:37.676 INFO Opened existing region file "/tmp/downstairs-CZzWC74T/region.json"
12769 Sep 22 23:21:37.676 INFO Database read version 1
12770 Sep 22 23:21:37.676 INFO Database write version 1
12771 Sep 22 23:21:37.676 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52716, task: repair
12772 Sep 22 23:21:37.676 INFO Using repair address: 127.0.0.1:52716, task: main
12773 Sep 22 23:21:37.676 INFO No SSL acceptor configured, task: main
12774 Sep 22 23:21:37.676 INFO UUID: f7971d88-fcac-4a34-864e-16881c021a65
12775 Sep 22 23:21:37.676 INFO Blocks per extent:5 Total Extents: 2
12776 Sep 22 23:21:37.676 INFO Crucible Version: Crucible Version: 0.0.1
12777 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12778 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12779 rustc: 1.70.0 stable x86_64-unknown-illumos
12780 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12781 Sep 22 23:21:37.676 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12782 Sep 22 23:21:37.676 INFO Using address: 127.0.0.1:43385, task: main
12783 Sep 22 23:21:37.677 INFO Repair listens on 127.0.0.1:0, task: repair
12784 Sep 22 23:21:37.677 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35126, task: repair
12785 Sep 22 23:21:37.677 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35126, task: repair
12786 Sep 22 23:21:37.677 INFO listening, local_addr: 127.0.0.1:35126, task: repair
12787 Sep 22 23:21:37.677 INFO current number of open files limit 65536 is already the maximum
12788 Sep 22 23:21:37.677 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35126, task: repair
12789 Sep 22 23:21:37.677 INFO Created new region file "/tmp/downstairs-Avl7H1Y2/region.json"
12790 Sep 22 23:21:37.677 INFO Using repair address: 127.0.0.1:35126, task: main
12791 Sep 22 23:21:37.677 INFO No SSL acceptor configured, task: main
12792 Sep 22 23:21:37.678 INFO current number of open files limit 65536 is already the maximum
12793 Sep 22 23:21:37.679 INFO Created new region file "/tmp/downstairs-j0KkEAG7/region.json"
12794 Sep 22 23:21:37.679 INFO UUID: d803d148-f26b-4671-a1b7-af69788d7c3f
12795 Sep 22 23:21:37.679 INFO Blocks per extent:5 Total Extents: 2
12796 Sep 22 23:21:37.679 INFO UUID: 4b784146-9bfd-4506-a702-f66a5880a2a3
12797 Sep 22 23:21:37.679 INFO Blocks per extent:5 Total Extents: 2
12798 Sep 22 23:21:37.679 INFO Crucible Version: Crucible Version: 0.0.1
12799 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12800 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12801 rustc: 1.70.0 stable x86_64-unknown-illumos
12802 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12803 Sep 22 23:21:37.679 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12804 Sep 22 23:21:37.679 INFO Using address: 127.0.0.1:55221, task: main
12805 Sep 22 23:21:37.679 INFO Crucible Version: Crucible Version: 0.0.1
12806 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12807 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12808 rustc: 1.70.0 stable x86_64-unknown-illumos
12809 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12810 Sep 22 23:21:37.679 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12811 Sep 22 23:21:37.679 INFO Using address: 127.0.0.1:38174, task: main
12812 Sep 22 23:21:37.679 INFO UUID: 51f6c6d4-74e6-40e9-8073-39006e8120ae
12813 Sep 22 23:21:37.679 INFO Blocks per extent:5 Total Extents: 2
12814 Sep 22 23:21:37.679 INFO Crucible Version: Crucible Version: 0.0.1
12815 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12816 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12817 rustc: 1.70.0 stable x86_64-unknown-illumos
12818 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12819 Sep 22 23:21:37.679 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12820 Sep 22 23:21:37.679 INFO Using address: 127.0.0.1:64794, task: main
12821 Sep 22 23:21:37.679 INFO Repair listens on 127.0.0.1:0, task: repair
12822 Sep 22 23:21:37.679 INFO UUID: ab7c04fa-de73-4af6-8f95-d0dc6939a823
12823 Sep 22 23:21:37.679 INFO Blocks per extent:5 Total Extents: 2
12824 Sep 22 23:21:37.680 INFO Crucible Version: Crucible Version: 0.0.1
12825 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12826 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12827 rustc: 1.70.0 stable x86_64-unknown-illumos
12828 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12829 Sep 22 23:21:37.680 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12830 Sep 22 23:21:37.680 INFO Using address: 127.0.0.1:39320, task: main
12831 Sep 22 23:21:37.680 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44687, task: repair
12832 Sep 22 23:21:37.679 INFO Repair listens on 127.0.0.1:0, task: repair
12833 Sep 22 23:21:37.680 INFO Repair listens on 127.0.0.1:0, task: repair
12834 Sep 22 23:21:37.680 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44687, task: repair
12835 Sep 22 23:21:37.680 INFO listening, local_addr: 127.0.0.1:44687, task: repair
12836 Sep 22 23:21:37.680 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52116, task: repair
12837 Sep 22 23:21:37.680 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59717, task: repair
12838 Sep 22 23:21:37.680 INFO UUID: f9162fd6-864f-4c42-be11-73fe673d33ac
12839 Sep 22 23:21:37.680 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52116, task: repair
12840 Sep 22 23:21:37.680 INFO Blocks per extent:5 Total Extents: 2
12841 Sep 22 23:21:37.680 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59717, task: repair
12842 Sep 22 23:21:37.680 INFO Repair listens on 127.0.0.1:0, task: repair
12843 Sep 22 23:21:37.680 INFO listening, local_addr: 127.0.0.1:52116, task: repair
12844 Sep 22 23:21:37.680 INFO listening, local_addr: 127.0.0.1:59717, task: repair
12845 Sep 22 23:21:37.680 INFO Crucible Version: Crucible Version: 0.0.1
12846 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12847 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12848 rustc: 1.70.0 stable x86_64-unknown-illumos
12849 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12850 Sep 22 23:21:37.680 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44687, task: repair
12851 Sep 22 23:21:37.680 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12852 Sep 22 23:21:37.680 INFO Using address: 127.0.0.1:59903, task: main
12853 Sep 22 23:21:37.680 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35012, task: repair
12854 Sep 22 23:21:37.680 INFO Using repair address: 127.0.0.1:44687, task: main
12855 Sep 22 23:21:37.680 INFO No SSL acceptor configured, task: main
12856 Sep 22 23:21:37.680 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35012, task: repair
12857 Sep 22 23:21:37.680 INFO listening, local_addr: 127.0.0.1:35012, task: repair
12858 Sep 22 23:21:37.680 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52116, task: repair
12859 Sep 22 23:21:37.680 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59717, task: repair
12860 Sep 22 23:21:37.680 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35012, task: repair
12861 Sep 22 23:21:37.680 INFO Using repair address: 127.0.0.1:59717, task: main
12862 Sep 22 23:21:37.680 INFO Using repair address: 127.0.0.1:35012, task: main
12863 Sep 22 23:21:37.680 INFO UUID: edb26b00-76c7-4c82-b262-a6cf7181b4aa
12864 Sep 22 23:21:37.680 INFO No SSL acceptor configured, task: main
12865 Sep 22 23:21:37.680 INFO No SSL acceptor configured, task: main
12866 Sep 22 23:21:37.680 INFO Using repair address: 127.0.0.1:52116, task: main
12867 Sep 22 23:21:37.680 INFO Blocks per extent:5 Total Extents: 2
12868 Sep 22 23:21:37.680 INFO No SSL acceptor configured, task: main
12869 Sep 22 23:21:37.681 INFO current number of open files limit 65536 is already the maximum
12870 Sep 22 23:21:37.681 INFO Crucible Version: Crucible Version: 0.0.1
12871 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12872 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12873 rustc: 1.70.0 stable x86_64-unknown-illumos
12874 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12875 Sep 22 23:21:37.681 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12876 Sep 22 23:21:37.681 INFO Using address: 127.0.0.1:62341, task: main
12877 Sep 22 23:21:37.681 INFO Created new region file "/tmp/downstairs-3KVbZFEh/region.json"
12878 Sep 22 23:21:37.681 INFO Repair listens on 127.0.0.1:0, task: repair
12879 Sep 22 23:21:37.681 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45979, task: repair
12880 Sep 22 23:21:37.681 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45979, task: repair
12881 Sep 22 23:21:37.681 INFO listening, local_addr: 127.0.0.1:45979, task: repair
12882 Sep 22 23:21:37.681 INFO Repair listens on 127.0.0.1:0, task: repair
12883 Sep 22 23:21:37.681 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58643, task: repair
12884 Sep 22 23:21:37.681 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45979, task: repair
12885 Sep 22 23:21:37.681 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58643, task: repair
12886 Sep 22 23:21:37.681 INFO Using repair address: 127.0.0.1:45979, task: main
12887 Sep 22 23:21:37.681 INFO listening, local_addr: 127.0.0.1:58643, task: repair
12888 Sep 22 23:21:37.681 INFO No SSL acceptor configured, task: main
12889 Sep 22 23:21:37.681 INFO current number of open files limit 65536 is already the maximum
12890 Sep 22 23:21:37.681 INFO current number of open files limit 65536 is already the maximum
12891 Sep 22 23:21:37.681 INFO Created new region file "/tmp/downstairs-ClNSXxBw/region.json"
12892 Sep 22 23:21:37.681 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58643, task: repair
12893 Sep 22 23:21:37.681 INFO current number of open files limit 65536 is already the maximum
12894 Sep 22 23:21:37.681 INFO Created new region file "/tmp/downstairs-XyohQkMW/region.json"
12895 Sep 22 23:21:37.681 INFO Using repair address: 127.0.0.1:58643, task: main
12896 Sep 22 23:21:37.681 INFO No SSL acceptor configured, task: main
12897 Sep 22 23:21:37.681 INFO Created new region file "/tmp/downstairs-gG7qY3T8/region.json"
12898 Sep 22 23:21:37.682 INFO current number of open files limit 65536 is already the maximum
12899 Sep 22 23:21:37.682 INFO Created new region file "/tmp/downstairs-wQIcFXNr/region.json"
12900 Sep 22 23:21:37.682 INFO current number of open files limit 65536 is already the maximum
12901 Sep 22 23:21:37.682 INFO Created new region file "/tmp/downstairs-olr9HIrf/region.json"
12902 Sep 22 23:21:37.682 INFO current number of open files limit 65536 is already the maximum
12903 Sep 22 23:21:37.682 INFO Opened existing region file "/tmp/downstairs-Avl7H1Y2/region.json"
12904 Sep 22 23:21:37.682 INFO Database read version 1
12905 Sep 22 23:21:37.682 INFO Database write version 1
12906 Sep 22 23:21:37.684 INFO current number of open files limit 65536 is already the maximum
12907 Sep 22 23:21:37.684 INFO Opened existing region file "/tmp/downstairs-j0KkEAG7/region.json"
12908 Sep 22 23:21:37.684 INFO Database read version 1
12909 Sep 22 23:21:37.684 INFO Database write version 1
12910 Sep 22 23:21:37.685 INFO UUID: 7bf7d359-28ba-4346-a3ee-541f5bc5a1c0
12911 Sep 22 23:21:37.685 INFO Blocks per extent:5 Total Extents: 2
12912 Sep 22 23:21:37.686 INFO Crucible Version: Crucible Version: 0.0.1
12913 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12914 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12915 rustc: 1.70.0 stable x86_64-unknown-illumos
12916 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12917 Sep 22 23:21:37.686 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12918 Sep 22 23:21:37.686 INFO Using address: 127.0.0.1:42674, task: main
12919 Sep 22 23:21:37.686 INFO Repair listens on 127.0.0.1:0, task: repair
12920 Sep 22 23:21:37.686 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33608, task: repair
12921 Sep 22 23:21:37.686 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33608, task: repair
12922 Sep 22 23:21:37.686 INFO current number of open files limit 65536 is already the maximum
12923 Sep 22 23:21:37.686 INFO Opened existing region file "/tmp/downstairs-3KVbZFEh/region.json"
12924 Sep 22 23:21:37.686 INFO Database read version 1
12925 Sep 22 23:21:37.686 INFO listening, local_addr: 127.0.0.1:33608, task: repair
12926 Sep 22 23:21:37.686 INFO Database write version 1
12927 Sep 22 23:21:37.686 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33608, task: repair
12928 Sep 22 23:21:37.686 INFO Using repair address: 127.0.0.1:33608, task: main
12929 Sep 22 23:21:37.686 INFO No SSL acceptor configured, task: main
12930 Sep 22 23:21:37.687 INFO current number of open files limit 65536 is already the maximum
12931 Sep 22 23:21:37.687 INFO Opened existing region file "/tmp/downstairs-XyohQkMW/region.json"
12932 Sep 22 23:21:37.687 INFO Database read version 1
12933 Sep 22 23:21:37.687 INFO Database write version 1
12934 Sep 22 23:21:37.688 INFO current number of open files limit 65536 is already the maximum
12935 Sep 22 23:21:37.688 INFO Opened existing region file "/tmp/downstairs-ClNSXxBw/region.json"
12936 Sep 22 23:21:37.688 INFO Database read version 1
12937 Sep 22 23:21:37.688 INFO Database write version 1
12938 Sep 22 23:21:37.688 INFO UUID: 4afe10fd-86f8-4da0-8ac1-1f760ded452a
12939 Sep 22 23:21:37.689 INFO Blocks per extent:5 Total Extents: 2
12940 Sep 22 23:21:37.689 INFO Crucible Version: Crucible Version: 0.0.1
12941 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12942 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12943 rustc: 1.70.0 stable x86_64-unknown-illumos
12944 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12945 Sep 22 23:21:37.689 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12946 Sep 22 23:21:37.689 INFO Using address: 127.0.0.1:64157, task: main
12947 Sep 22 23:21:37.689 INFO Repair listens on 127.0.0.1:0, task: repair
12948 Sep 22 23:21:37.689 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39249, task: repair
12949 Sep 22 23:21:37.689 INFO current number of open files limit 65536 is already the maximum
12950 Sep 22 23:21:37.689 INFO Opened existing region file "/tmp/downstairs-gG7qY3T8/region.json"
12951 Sep 22 23:21:37.689 INFO Database read version 1
12952 Sep 22 23:21:37.689 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39249, task: repair
12953 Sep 22 23:21:37.689 INFO Database write version 1
12954 Sep 22 23:21:37.689 INFO current number of open files limit 65536 is already the maximum
12955 Sep 22 23:21:37.689 INFO listening, local_addr: 127.0.0.1:39249, task: repair
12956 Sep 22 23:21:37.689 INFO Opened existing region file "/tmp/downstairs-wQIcFXNr/region.json"
12957 Sep 22 23:21:37.689 INFO Database read version 1
12958 Sep 22 23:21:37.689 INFO Database write version 1
12959 Sep 22 23:21:37.689 INFO UUID: 74dd6474-4427-416d-8d80-1f3912278411
12960 Sep 22 23:21:37.689 INFO Blocks per extent:5 Total Extents: 2
12961 Sep 22 23:21:37.689 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39249, task: repair
12962 Sep 22 23:21:37.689 INFO Crucible Version: Crucible Version: 0.0.1
12963 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12964 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12965 rustc: 1.70.0 stable x86_64-unknown-illumos
12966 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12967 Sep 22 23:21:37.689 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12968 Sep 22 23:21:37.689 INFO Using repair address: 127.0.0.1:39249, task: main
12969 Sep 22 23:21:37.689 INFO Using address: 127.0.0.1:59938, task: main
12970 Sep 22 23:21:37.689 INFO No SSL acceptor configured, task: main
12971 Sep 22 23:21:37.690 INFO Repair listens on 127.0.0.1:0, task: repair
12972 Sep 22 23:21:37.690 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50240, task: repair
12973 Sep 22 23:21:37.690 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50240, task: repair
12974 Sep 22 23:21:37.690 INFO listening, local_addr: 127.0.0.1:50240, task: repair
12975 Sep 22 23:21:37.690 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50240, task: repair
12976 Sep 22 23:21:37.690 INFO Using repair address: 127.0.0.1:50240, task: main
12977 Sep 22 23:21:37.690 INFO No SSL acceptor configured, task: main
12978 Sep 22 23:21:37.691 INFO Upstairs starts
12979 Sep 22 23:21:37.691 INFO Crucible Version: BuildInfo {
12980 version: "0.0.1",
12981 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
12982 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
12983 git_branch: "main",
12984 rustc_semver: "1.70.0",
12985 rustc_channel: "stable",
12986 rustc_host_triple: "x86_64-unknown-illumos",
12987 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
12988 cargo_triple: "x86_64-unknown-illumos",
12989 debug: true,
12990 opt_level: 0,
12991 }
12992 Sep 22 23:21:37.691 INFO Upstairs <-> Downstairs Message Version: 4
12993 {"msg":"Upstairs starts","v":0,"name":"crucible","level":30Sep 22 23:21:37.691 INFO Crucible stats registered with UUID: f712941d-0577-43e4-8a2e-7814ec270c09
12994 Sep 22 23:21:37.691 INFO Crucible f712941d-0577-43e4-8a2e-7814ec270c09 has session id: 2db8c46a-7d80-4db6-a4af-05be116bdc65
12995 ,"time":"2023-09-22T23:21:37.69144035Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
12996 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\nSep 22 23:21:37.691 INFO UUID: 440ce80c-c0c9-48b9-ba41-98710683ab87
12997 git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumosSep 22 23:21:37.691 INFO Blocks per extent:5 Total Extents: 2
12998 \",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level":30Sep 22 23:21:37.692 INFO Crucible Version: Crucible Version: 0.0.1
12999 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13000 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13001 rustc: 1.70.0 stable x86_64-unknown-illumos
13002 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13003 Sep 22 23:21:37.692 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13004 ,"time":"2023-09-22T23:21:37.691994616Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.692 INFO Using address: 127.0.0.1:57826, task: main
13005 }
130062023-09-22T23:21:37.692ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
13007 {{"msg":"Upstairs starts","v":0,"name":"crucible","level":30"msg":"Crucible stats registered with UUID: 8e88d9c1-84f3-4981-affe-0c1310d04226",,""v"time"::"0,"name2023-09-22T23:21:37.692513464Z"":"crucible,"",hostname"":"level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13008 {"msg":","Crucible Version: BuildInfo {time"\n:" version: \"0.0.12023-09-22T23:21:37.692549962Z\"",\n," git_sha: \"hostname":"ed48f294784d46ea7d4bb99336918b74358eca46\",\nip-10-150-1-55.us-west-2.compute.internal"," git_commit_timestamp: pid\"":47692023-09-22T22:51:18.000000000Z\",\n}
13009 git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n{ rustc_channel: \"stable\",\n" rustc_host_triple: \"msg":"x86_64-unknown-illumos\",\n rustc_commit_sha: \"Crucible 8e88d9c1-84f3-4981-affe-0c1310d04226 has session id: 8e7645e9-bbcd-4dae-b03b-42e001b696e390c541806f23a127002de5b4038be731ba1458ca"\",\n,"v": cargo_triple: \"0,"namex86_64-unknown-illumos"\":",\ncrucible" debug: true,,\n"level" opt_level: 0,:\n30}","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.692601839Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:21:37.69260832Zpid"":,4769"hostname}"
13010 :"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
130112023-09-22T23:21:37.692ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
13012 {"msg":"Crucible stats registered with UUID: 1c48f237-34b8-4484-b38b-7c6b80300cc8","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.692 INFO Repair listens on 127.0.0.1:0, task: repair
13013 2023-09-22T23:21:37.692687614Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
130142023-09-22T23:21:37.692ZINFOcrucible: Crucible 1c48f237-34b8-4484-b38b-7c6b80300cc8 has session id: a5c3fa2b-8808-4f8f-9c13-31707eb7c936
13015 Sep 22 23:21:37.692 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44890, task: repair
13016 Sep 22 23:21:37.692 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44890, task: repair
13017 Sep 22 23:21:37.692 INFO listening, local_addr: 127.0.0.1:44890, task: repair
13018 Sep 22 23:21:37.693 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44890, task: repair
13019 Sep 22 23:21:37.693 INFO Using repair address: 127.0.0.1:44890, task: main
13020 Sep 22 23:21:37.693 INFO No SSL acceptor configured, task: main
13021 The guest has requested activation
13022 The guest has requested activation
13023 The guest has requested activation
13024 Sep 22 23:21:37.693 INFO current number of open files limit 65536 is already the maximum
13025 Sep 22 23:21:37.693 INFO Opened existing region file "/tmp/downstairs-olr9HIrf/region.json"
13026 Sep 22 23:21:37.693 INFO Database read version 1
13027 Sep 22 23:21:37.693 INFO Database write version 1
13028 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13029 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13030 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13031 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13032 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13033 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13034 Sep 22 23:21:37.694 INFO UUID: 61a97a70-da7b-4cb5-91fa-9da487068e7c
13035 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13036 Sep 22 23:21:37.694 INFO Blocks per extent:5 Total Extents: 2
13037 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13038 Sep 22 23:21:37.694 INFO listening on 127.0.0.1:0, task: main
13039 Sep 22 23:21:37.694 INFO UUID: 10863a73-c1c6-434a-bb7b-15af38914b1b
13040 Sep 22 23:21:37.694 INFO Blocks per extent:5 Total Extents: 2
13041 Sep 22 23:21:37.694 INFO UUID: 88162c01-9da1-4e8d-8218-aee73fa16e70
13042 Sep 22 23:21:37.694 INFO Blocks per extent:5 Total Extents: 2
13043 Sep 22 23:21:37.694 INFO Crucible Version: Crucible Version: 0.0.1
13044 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13045 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13046 rustc: 1.70.0 stable x86_64-unknown-illumos
13047 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13048 Sep 22 23:21:37.694 INFO Crucible Version: Crucible Version: 0.0.1
13049 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13050 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13051 rustc: 1.70.0 stable x86_64-unknown-illumos
13052 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13053 Sep 22 23:21:37.694 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13054 Sep 22 23:21:37.694 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13055 Sep 22 23:21:37.694 INFO Using address: 127.0.0.1:43168, task: main
13056 Sep 22 23:21:37.694 INFO Using address: 127.0.0.1:34655, task: main
13057 Sep 22 23:21:37.694 INFO Crucible Version: Crucible Version: 0.0.1
13058 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13059 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13060 rustc: 1.70.0 stable x86_64-unknown-illumos
13061 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13062 Sep 22 23:21:37.694 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13063 Sep 22 23:21:37.694 INFO Using address: 127.0.0.1:36502, task: main
13064 Sep 22 23:21:37.694 INFO [0] connecting to 127.0.0.1:46295, looper: 0
13065 {"msg":"Upstairs starts"{,"v":0,"name":""cruciblemsg"":","level":[0] connecting to 127.0.0.1:36647"30,"v":0,"name":"crucible","level":30{"msg":"[0] connecting to 127.0.0.1:46967","v":0,"name":"crucible,"","time"level":,:""30time":"2023-09-22T23:21:37.69489497Z"2023-09-22T23:21:37.694887787Z","hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pidip-10-150-1-55.us-west-2.compute.internal"":,4769"pid":,4769"looper":"0"}
13066 }
13067 ,{"time":"2023-09-22T23:21:37.694922109Z""msg",:""hostname":"Crucible Version: BuildInfo {\nip-10-150-1-55.us-west-2.compute.internal" version: \","0.0.1\"pid",\n:4769 git_sha: \","ed48f294784d46ea7d4bb99336918b74358eca46\"looper",\n:"0" git_commit_timestamp: \"}
13068 2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.695003069Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
130692023-09-22T23:21:37.695ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
130702023-09-22T23:21:37.695ZINFOcrucible: Crucible stats registered with UUID: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d
130712023-09-22T23:21:37.695ZINFOcrucible: Crucible 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d has session id: 267e8ce7-7645-4600-9b29-59a06b3738f2
13072 Sep 22 23:21:37.695 INFO [1] connecting to 127.0.0.1:43385, looper: 1
130732023-09-22T23:21:37.695ZINFOcrucible: [1] connecting to 127.0.0.1:55221 looper = 1
13074 Sep 22 23:21:37.695 INFO Repair listens on 127.0.0.1:0, task: repair
13075 Sep 22 23:21:37.695 INFO [2] connecting to 127.0.0.1:64157, looper: 2
13076 Sep 22 23:21:37.695 INFO Repair listens on 127.0.0.1:0, task: repair
13077 {"msg":"[1] connecting to 127.0.0.1:40751","v":0,"name":"crucible","level":30{"msg":"[2] connecting to 127.0.0.1:59938","v":0,"name":"crucible",,""timelevel""::"30The guest has requested activation
13078 2023-09-22T23:21:37.695446374Z"Sep 22 23:21:37.695 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46369, task: repair
13079 Sep 22 23:21:37.695 INFO Repair listens on 127.0.0.1:0, task: repair
13080 Sep 22 23:21:37.695 INFO listening on 127.0.0.1:0, task: main
13081 ,"time":"2023-09-22T23:21:37.69547072Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2"}
13082 Sep 22 23:21:37.695 INFO up_listen starts, task: up_listen
13083 Sep 22 23:21:37.695 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46369, task: repair
13084 Sep 22 23:21:37.695 INFO listening on 127.0.0.1:0, task: main
13085 Sep 22 23:21:37.695 INFO Wait for all three downstairs to come online
13086 Sep 22 23:21:37.695 INFO Flush timeout: 0.5
13087 Sep 22 23:21:37.695 INFO listening, local_addr: 127.0.0.1:46369, task: repair
13088 ,Sep 22 23:21:37.695 INFO listening on 127.0.0.1:0, task: main
13089 "hostname":{"ip-10-150-1-55.us-west-2.compute.internal",""pidmsg""::"4769up_listen starts",,""v"looper:":"01","}name"
13090 :"crucible","level":30{"msg":"[2] connecting to 127.0.0.1:42674","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.695822504Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","time"pid:"":47692023-09-22T23:21:37.69583393Z",,""hostnametask""::""up_listen"}
13091 ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper{":"2"}
13092 "msg":"{Wait for all three downstairs to come online",""msgv""::"0,"up_listen starts"name":,""vcrucible"":0,","level"name:":30"crucible","level":30,,""timetime""::""2023-09-22T23:21:37.695882977Z2023-09-22T23:21:37.69588068Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47694769},
13093 "Sep 22 23:21:37.695 INFO f712941d-0577-43e4-8a2e-7814ec270c09 active request set
13094 Sep 22 23:21:37.695 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56492, task: repair
13095 {task":"up_listen""}
13096 msg":"Flush timeout: 0.5{","v"":msg0":,""name":"crucible"Wait for all three downstairs to come online,"",level""v:":300,"name":"crucible","level":30,"time":","time2023-09-22T23:21:37.695926987Z"":","hostname"2023-09-22T23:21:37.695930483Z:"",Sep 22 23:21:37.695 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46369, task: repair
13097 "ip-10-150-1-55.us-west-2.compute.internal"hostname",:""Sep 22 23:21:37.695 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56492, task: repair
13098 pidip-10-150-1-55.us-west-2.compute.internal"":,4769"}pid"
13099 :4769}
13100 {"msg":"Flush timeout: 0.5","v":0,"name":"{crucible",""level"msg:":30"1c48f237-34b8-4484-b38b-7c6b80300cc8 active request set","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.695975787Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","time"pid:"":4769}
13101 2023-09-22T23:21:37.695983574Z"Sep 22 23:21:37.695 INFO Using repair address: 127.0.0.1:46369, task: main
13102 {Sep 22 23:21:37.695 INFO listening, local_addr: 127.0.0.1:56492, task: repair
13103 ,""msg"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"8e88d9c1-84f3-4981-affe-0c1310d04226 active request set",","pid"v:":47690,"}
13104 name":"crucible","level":30Sep 22 23:21:37.696 INFO No SSL acceptor configured, task: main
13105 ,"time":"2023-09-22T23:21:37.696019167Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
131062023-09-22T23:21:37.696ZINFOcrucible: [0] connecting to 127.0.0.1:43936 looper = 0
131072023-09-22T23:21:37.696ZINFOcrucible: [1] connecting to 127.0.0.1:39320 looper = 1
13108 {"msg":"[2] connecting to 127.0.0.1:57826","v":0,"name":"crucible","level":30Sep 22 23:21:37.696 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56492, task: repair
13109 ,"time":"2023-09-22T23:21:37.696169026Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2"}
13110 {"msg":"up_listen starts","v":0,"name":"crucible","level":30Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:37490, task: main
13111 ,"time":"2023-09-22T23:21:37.69621327Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:49080, task: main
13112 ,"task":"up_listen"}
131132023-09-22T23:21:37.696ZINFOcrucible: Wait for all three downstairs to come online
13114 {"msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30Sep 22 23:21:37.696 INFO Using repair address: 127.0.0.1:56492, task: main
13115 ,"time":"2023-09-22T23:21:37.696309132Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13116 {"Sep 22 23:21:37.696 INFO No SSL acceptor configured, task: main
13117 msg":"69c4c5e1-2fd5-4777-bf6a-fcd75618f55d active request set","v":0,"name":"crucible","level":30Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:40329, task: main
13118 ,"time":"Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:59058, task: main
13119 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:40703, task: main
13120 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:45668, task: main
13121 2023-09-22T23:21:37.696357793Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13122 Sep 22 23:21:37.696 INFO UUID: e80e1f8b-7f5a-43fe-b856-a2bd94bd70c3
13123 Sep 22 23:21:37.696 INFO Blocks per extent:5 Total Extents: 2
13124 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:57639, task: main
13125 Sep 22 23:21:37.696 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35046, task: repair
13126 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:49558, task: main
13127 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:40898, task: main
13128 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:40210, task: main
13129 Sep 22 23:21:37.696 INFO Crucible Version: Crucible Version: 0.0.1
13130 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13131 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13132 rustc: 1.70.0 stable x86_64-unknown-illumos
13133 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13134 Sep 22 23:21:37.696 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35046, task: repair
13135 Sep 22 23:21:37.696 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13136 Sep 22 23:21:37.696 INFO Using address: 127.0.0.1:51713, task: main
13137 Sep 22 23:21:37.696 INFO accepted connection from 127.0.0.1:60563, task: main
13138 {"msg":"[0] 1c48f237-34b8-4484-b38b-7c6b80300cc8 looper connected","v":0,"name":"crucible"Sep 22 23:21:37.696 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 looper connected, looper: 0
13139 ,"level":30Sep 22 23:21:37.696 INFO listening, local_addr: 127.0.0.1:35046, task: repair
13140 ,"time":"2023-09-22T23:21:37.696776987Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"0"}
13141 {"msg":"[0] 8e88d9c1-84f3-4981-affe-0c1310d04226 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.696870353Z","{hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":"4769msg":,""looper":"0"}[0] Proc runs for 127.0.0.1:46967 in state New"
13142 ,"v":0,"name":"crucible"{,"level":30"msg":"[0] Proc runs for 127.0.0.1:36647 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.696917047Z","hostname":"Sep 22 23:21:37.696 INFO [0] Proc runs for 127.0.0.1:46295 in state New
13143 ip-10-150-1-55.us-west-2.compute.internal",,Sep 22 23:21:37.696 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35046, task: repair
13144 ""pid":time4769":"}2023-09-22T23:21:37.696926118Z"
13145 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13146 {"msg":"[0] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d looper connected"Sep 22 23:21:37.696 INFO Using repair address: 127.0.0.1:35046, task: main
13147 ,"v":0,"name":"crucible","level":30Sep 22 23:21:37.697 INFO No SSL acceptor configured, task: main
13148 ,"time":"2023-09-22T23:21:37.697006965Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"0"}
13149 Sep 22 23:21:37.697 INFO Repair listens on 127.0.0.1:0, task: repair
13150 Sep 22 23:21:37.697 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42362, task: repair
13151 {"msg":"Upstairs starts","v":0,"name":"crucible"Sep 22 23:21:37.697 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42362, task: repair
13152 ,"level":30,"time":"2023-09-22T23:21:37.697243772Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13153 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n{ git_branch: \"main\",\n rustc_semver: \""1.70.0\"msg,"\n:" rustc_channel: \"stable\",\n[1] 1c48f237-34b8-4484-b38b-7c6b80300cc8 looper connected" rustc_host_triple: \","vx86_64-unknown-illumos\"",:\n0, rustc_commit_sha: "\"name":"crucible"90c541806f23a127002de5b4038be731ba1458ca\",",\nlevel": cargo_triple: \"30x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}"Sep 22 23:21:37.697 INFO [1] f712941d-0577-43e4-8a2e-7814ec270c09 looper connected, looper: 1
13154 ,{"v":0,"name":"crucible","level"":30msg":"Upstairs starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.69734695Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13155 {,,"""msgtime""::""time":"2023-09-22T23:21:37.697322444ZUpstairs <-> Downstairs Message Version: 4""2023-09-22T23:21:37.697356779Z",,""v"hostname"::"0,","hostname"ip-10-150-1-55.us-west-2.compute.internalname"",:""Sep 22 23:21:37.697 INFO [1] Proc runs for 127.0.0.1:43385 in state New
13156 :"cruciblepid"":,4769"level"ip-10-150-1-55.us-west-2.compute.internal":,,30""looper"pid":":1"4769}
13157 }
13158 {,"time":""msg"2023-09-22T23:21:37.697409168Z:"","{hostname":[1] Proc runs for 127.0.0.1:55221 in state New"","v":0ip-10-150-1-55.us-west-2.compute.internal,""",name"":pid"":crucible4769"msg},
13159 ""level"{::{"30"Crucible Version: BuildInfo {msg""\n:msg version: ""\":0.0.1"\",\nCrucible stats registered with UUID: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1" git_sha: \",[1] 8e88d9c1-84f3-4981-affe-0c1310d04226 looper connected",""v,ed48f294784d46ea7d4bb99336918b74358eca46\""time""v::0",,""name\n"2023-09-22T23:21:37.697442914Z:"":crucible," git_commit_timestamp: ,""\"0hostnamelevel""::"302023-09-22T22:51:18.000000000Z\",,\n"ip-10-150-1-55.us-west-2.compute.internal" git_branch: name"\"Sep 22 23:21:37.697 INFO listening, local_addr: 127.0.0.1:42362, task: repair
13160 :,,Sep 22 23:21:37.697 INFO [2] f712941d-0577-43e4-8a2e-7814ec270c09 looper connected, looper: 2
13161 main\"""pid""timecrucible":":4769",}\n
13162 2023-09-22T23:21:37.697473536Z,"" rustc_semver: ,\"level1.70.0\"",:"\n30hostname":" rustc_channel: \"stable\",\nip-10-150-1-55.us-west-2.compute.internal"," rustc_host_triple: \"pid":4769x86_64-unknown-illumos\"},
13163 \n rustc_commit_sha: \"{90c541806f23a127002de5b4038be731ba1458ca\",\n"msg" cargo_triple: :\""x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\nCrucible 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 has session id: fdc9bcf8-a539-475f-a4b2-6afeca56855a}",",""v"time"::,""0v",":name"2023-09-22T23:21:37.697514944Z0":","crucible","name"hostname:","":crucible"level","":levelip-10-150-1-55.us-west-2.compute.internal"",:"3030pid":4769,"looper":"1"}
13164 ,"time":"2023-09-22T23:21:37.697562375Z","hostname":",ip-10-150-1-55.us-west-2.compute.internal"","timepid""::4769"}
13165 Sep 22 23:21:37.697 INFO [2] Proc runs for 127.0.0.1:64157 in state New
13166 2023-09-22T23:21:37.69756306Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
131672023-09-22T23:21:37.697ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
13168 {Sep 22 23:21:37.697 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42362, task: repair
13169 "msg":"Crucible stats registered with UUID: 82efa4d5-f84d-4765-a042-3cbb8c544041","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.697677994Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.697 INFO Using repair address: 127.0.0.1:42362, task: main
13170 }
13171 {"msg":"Sep 22 23:21:37.697 INFO No SSL acceptor configured, task: main
13172 Crucible 82efa4d5-f84d-4765-a042-3cbb8c544041 has session id: 6892d9b7-9ef5-4f95-bdab-c03ff61fc150","v":0,"name":"crucible","level":30{"msg":","time":"[1] Proc runs for 127.0.0.1:40751 in state New"2023-09-22T23:21:37.69773119Z",",v":"0hostname",":name":""crucible","level":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4769}
13173 ,"time":"2023-09-22T23:21:37.697758764Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
131742023-09-22T23:21:37.697ZINFOcrucible: [2] 8e88d9c1-84f3-4981-affe-0c1310d04226 looper connected looper = 2
13175 The guest has requested activation
13176 Sep 22 23:21:37.697 INFO listening on 127.0.0.1:0, task: main
13177 Sep 22 23:21:37.697 INFO listening on 127.0.0.1:0, task: main
13178 Sep 22 23:21:37.698 INFO listening on 127.0.0.1:0, task: main
13179 Sep 22 23:21:37.698 INFO Connection request from 1c48f237-34b8-4484-b38b-7c6b80300cc8 with version 4, task: proc
13180 {"msg":"[0] Proc runs for 127.0.0.1:43936 in state New","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.698 INFO upstairs UpstairsConnection { upstairs_id: 1c48f237-34b8-4484-b38b-7c6b80300cc8, session_id: 126b8002-1d8d-46c3-951e-15087f60a7c9, gen: 1 } connected, version 4, task: proc
13181 2023-09-22T23:21:37.698097675Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
131822023-09-22T23:21:37.698ZINFOcrucible: [1] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d looper connected looper = 1
131832023-09-22T23:21:37.698ZINFOcrucible: [1] Proc runs for 127.0.0.1:39320 in state New
13184 {"msg":"[2] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d looper connected","v":0Sep 22 23:21:37.698 INFO accepted connection from 127.0.0.1:51763, task: main
13185 ,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.698262618Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2"}
13186 {{"msg":""msg"{:[2] Proc runs for 127.0.0.1:57826 in state New{Sep 22 23:21:37.698 INFO Connection request from f712941d-0577-43e4-8a2e-7814ec270c09 with version 4, task: proc
13187 """"msg,"":"v"[0] connecting to 127.0.0.1:39150msg""Upstairs starts:"0,:"",,""v[2] Proc runs for 127.0.0.1:42674 in state New"":vname,0"""::0",,v""name:"0:",crucible""name",:"""cruciblename"":,""cruciblelevelSep 22 23:21:37.698 INFO upstairs UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } connected, version 4, task: proc
13188 levelcrucible"":,"30"":,level30""level"::3030,"time":","2023-09-22T23:21:37.698351182Z"time,"":,time"":""2023-09-22T23:21:37.698346874Zhostname""2023-09-22T23:21:37.698353774Z,"":",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"ip-10-150-1-55.us-west-2.compute.internal"":,,4769""pidpid""::47694769}
13189 ,"time"Sep 22 23:21:37.698 INFO Connection request from f712941d-0577-43e4-8a2e-7814ec270c09 with version 4, task: proc
13190 :"2023-09-22T23:21:37.698354057Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid,":"4769looper":"0"}
13191 }
13192 {{"msg":""msg":"Sep 22 23:21:37.698 INFO upstairs UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } connected, version 4, task: proc
13193 [1] connecting to 127.0.0.1:64794Crucible Version: BuildInfo {"\n, version: {\""v0.0.1"\":,\n0," git_sha: \"name":""crucibleed48f294784d46ea7d4bb99336918b74358eca46"\"msg,,\n""level" git_commit_timestamp: ::30\""2023-09-22T22:51:18.000000000Z\",\n[2] 1c48f237-34b8-4484-b38b-7c6b80300cc8 looper connected" git_branch: \"main\",\n,"v" rustc_semver: \":1.70.0\"0,\n,"name" rustc_channel: \":"stable\"crucible",\n,"level" rustc_host_triple: ,\"":timex86_64-unknown-illumos"\":,"\n302023-09-22T23:21:37.698501932Z rustc_commit_sha: "\","hostname":90c541806f23a127002de5b4038be731ba1458ca"\",\n cargo_triple: \"ip-10-150-1-55.us-west-2.compute.internal","x86_64-unknown-illumospid\"":,\n4769 debug: true,,\n"looper" opt_level: 0,:\n"}1""}}
13194 ,"
13195 {v":0,""namemsg""::"",crucible"",[2] connecting to 127.0.0.1:43168""time,level"""v:"30::0","name":"2023-09-22T23:21:37.698525651Z"crucible",",level":"30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2","}time,"":"
13196 2023-09-22T23:21:37.698551933Ztime"":","hostname"2023-09-22T23:21:37.698558293Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769ip-10-150-1-55.us-west-2.compute.internal","}pid
13197 ":4769,"looper":Sep 22 23:21:37.698 INFO Connection request from f712941d-0577-43e4-8a2e-7814ec270c09 with version 4, task: proc
13198 "{2""msg"}:"
13199 Upstairs <-> Downstairs Message Version: 4","v"{:0,""name"msg:"":"crucible",up_listen starts""level,"":v"30:0,"name":"crucible","level":30,"time",:""Sep 22 23:21:37.698 INFO upstairs UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } connected, version 4, task: proc
13200 2023-09-22T23:21:37.6986053Ztime"",:""hostname":"2023-09-22T23:21:37.698608964Z","ip-10-150-1-55.us-west-2.compute.internalhostname"":,""pid":4769}ip-10-150-1-55.us-west-2.compute.internal
13201 ","pid":4769{,"task":"up_listen""msg}"
13202 :"{"Crucible stats registered with UUID: 7be22e7c-d55a-4790-a835-081c096012f4"msg":,""v":0,"nameWait for all three downstairs to come online"":","crucible"v",:"0,level"":name"30:"crucible","level":30,"time":","time":"2023-09-22T23:21:37.698655639Z","2023-09-22T23:21:37.69865806Z"hostname":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal:"4769,"pid}"
13203 :4769}
13204 {{""msg"msg:"":"Flush timeout: 0.5","v":0,"name"Crucible 7be22e7c-d55a-4790-a835-081c096012f4 has session id: 354f8ce2-d4bb-4d47-a624-d406af786eb7":","crucible"v",:"0level,"":30name":"crucible","level":30,"time":","time":2023-09-22T23:21:37.698690815Z"","hostname2023-09-22T23:21:37.698693507Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"pid}"
13205 :4769}
132062023-09-22T23:21:37.698ZINFOcrucible: 82efa4d5-f84d-4765-a042-3cbb8c544041 active request set
132072023-09-22T23:21:37.698ZINFOcrucible: [0] 82efa4d5-f84d-4765-a042-3cbb8c544041 looper connected looper = 0
13208 {"msg":"[0] Proc runs for 127.0.0.1:39150 in state New","v":0,"name{":"crucible","level":30"msg":"Upstairs starts","v":0,"name":"crucible",","time"level":":302023-09-22T23:21:37.698783312Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13209 Sep 22 23:21:37.698 INFO accepted connection from 127.0.0.1:52973, task: main
13210 ,"time":"2023-09-22T23:21:37.698798624Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13211 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\nSep 22 23:21:37.698 INFO accepted connection from 127.0.0.1:43044, task: main
13212 git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.698949491Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13213 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30,"time":Sep 22 23:21:37.698 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) New New New ds_transition to WaitActive
13214 "2023-09-22T23:21:37.698996661Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
132152023-09-22T23:21:37.699ZINFOcrucible: Crucible stats registered with UUID: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9
13216 {"msg":"Crucible dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 has session id: 12ec540d-5ec3-4696-aaa4-c59d1d2545f8","v":0,"name":"crucible","level":30Sep 22 23:21:37.699 INFO [0] Transition from New to WaitActive
13217 ,"time":"2023-09-22T23:21:37.699083589Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13218 Sep 22 23:21:37.699 INFO [0] client is_active_req TRUE, promote! session 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae
13219 The guest has requested activation
13220 The guest has requested activation
13221 The guest has requested activation
13222 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13223 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13224 Sep 22 23:21:37.699 INFO [1] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) WaitActive New New ds_transition to WaitActive
13225 {Sep 22 23:21:37.699 INFO [1] Transition from New to WaitActive
13226 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13227 "msg":"[2] Proc runs for 127.0.0.1:59938 in state New","v":0,"name":"crucible","level":30Sep 22 23:21:37.699 INFO [1] client is_active_req TRUE, promote! session 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae
13228 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13229 Sep 22 23:21:37.699 INFO Connection request from 1c48f237-34b8-4484-b38b-7c6b80300cc8 with version 4, task: proc
13230 Sep 22 23:21:37.699 INFO Connection request from 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d with version 4, task: proc
13231 ,"time":"2023-09-22T23:21:37.699336772Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13232 Sep 22 23:21:37.699 INFO upstairs UpstairsConnection { upstairs_id: 1c48f237-34b8-4484-b38b-7c6b80300cc8, session_id: 126b8002-1d8d-46c3-951e-15087f60a7c9, gen: 1 } connected, version 4, task: proc
13233 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13234 Sep 22 23:21:37.699 INFO upstairs UpstairsConnection { upstairs_id: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d, session_id: 412fdf38-1b7f-416c-b371-8bd0b7f86f94, gen: 1 } connected, version 4, task: proc
13235 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13236 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13237 Sep 22 23:21:37.699 INFO Connection request from 8e88d9c1-84f3-4981-affe-0c1310d04226 with version 4, task: proc
13238 Sep 22 23:21:37.699 INFO accepted connection from 127.0.0.1:44013, task: main
13239 {"msg":"[1] 82efa4d5-f84d-4765-a042-3cbb8c544041 looper connected"Sep 22 23:21:37.699 INFO upstairs UpstairsConnection { upstairs_id: 8e88d9c1-84f3-4981-affe-0c1310d04226, session_id: d738ea25-1afc-4d4c-9b80-99c2ada34ff7, gen: 1 } connected, version 4, task: proc
13240 ,Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13241 Sep 22 23:21:37.699 INFO [2] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) WaitActive WaitActive New ds_transition to WaitActive
13242 Sep 22 23:21:37.699 INFO listening on 127.0.0.1:0, task: main
13243 Sep 22 23:21:37.699 INFO [2] Transition from New to WaitActive
13244 Sep 22 23:21:37.699 INFO Connection request from 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d with version 4, task: proc
13245 Sep 22 23:21:37.699 INFO [2] client is_active_req TRUE, promote! session 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae
13246 Sep 22 23:21:37.699 INFO Connection request from 8e88d9c1-84f3-4981-affe-0c1310d04226 with version 4, task: proc
13247 {"msg":"[0] connecting to 127.0.0.1:63497","v":0,"name":"crucible","level":30Sep 22 23:21:37.699 INFO upstairs UpstairsConnection { upstairs_id: 8e88d9c1-84f3-4981-affe-0c1310d04226, session_id: d738ea25-1afc-4d4c-9b80-99c2ada34ff7, gen: 1 } connected, version 4, task: proc
13248 ,"time":"2023-09-22T23:21:37.699596388Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"0"}
13249 {"msg":"[1] connecting to 127.0.0.1:62341","v":0,"name":"crucible","level":30,"time"Sep 22 23:21:37.699 INFO Connection request from 1c48f237-34b8-4484-b38b-7c6b80300cc8 with version 4, task: proc
13250 :"2023-09-22T23:21:37.699649102Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"1"}
13251 Sep 22 23:21:37.699 INFO upstairs UpstairsConnection { upstairs_id: 1c48f237-34b8-4484-b38b-7c6b80300cc8, session_id: 126b8002-1d8d-46c3-951e-15087f60a7c9, gen: 1 } connected, version 4, task: proc
13252 {{"msg":"[0] connecting to 127.0.0.1:54884","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.699804643Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"0"}
13253 Sep 22 23:21:37.699 INFO UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } is now active (read-write)
13254 {""vmsg":"[0] 1c48f237-34b8-4484-b38b-7c6b80300cc8 (126b8002-1d8d-46c3-951e-15087f60a7c9) New New New ds_transition to WaitActive",Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:47996, task: main
13255 "v":0,"name":"crucible","level":30Sep 22 23:21:37.699 INFO Connection request from 8e88d9c1-84f3-4981-affe-0c1310d04226 with version 4, task: proc
13256 ":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.700108304Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13257 Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: 1c48f237-34b8-4484-b38b-7c6b80300cc8, session_id: 126b8002-1d8d-46c3-951e-15087f60a7c9, gen: 1 } is now active (read-write)
13258 Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:59016, task: main
13259 {Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } is now active (read-write)
13260 "msg"Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:51535, task: main
13261 Sep 22 23:21:37.700 INFO upstairs UpstairsConnection { upstairs_id: 8e88d9c1-84f3-4981-affe-0c1310d04226, session_id: d738ea25-1afc-4d4c-9b80-99c2ada34ff7, gen: 1 } connected, version 4, task: proc
13262 :""[0] Transition from New to WaitActive","v":0msg"{,":name"":"crucible"[0] connecting to 127.0.0.1:34554"","levelmsg"",:":30"v":0,[2] connecting to 127.0.0.1:51713""name":",crucible"","v"level:":030,"name":"crucible","level":Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:33771, task: main
13263 30,"time":"2023-09-22T23:21:37.70019406Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",,""time"pid":":47692023-09-22T23:21:37.700203658Z"},
13264 ","hostname":time"":"{2023-09-22T23:21:37.700212899Z"ip-10-150-1-55.us-west-2.compute.internal",",pid""":hostname"msg":4769":","looper"ip-10-150-1-55.us-west-2.compute.internal":",[0] client is_active_req TRUE, promote! session 126b8002-1d8d-46c3-951e-15087f60a7c9"0"",pid"":}v"
13265 4769Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:34244, task: main
13266 :,"0Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } is now active (read-write)
13267 {,Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:36546, task: main
13268 "msg":"[1] connecting to 127.0.0.1:59903","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.700331435Z","hostname":"Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:54123, task: main
13269 ip-10-150-1-55.us-west-2.compute.internal","pid":"4769name",":looper"":"crucible"1",}"Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: 1c48f237-34b8-4484-b38b-7c6b80300cc8, session_id: 126b8002-1d8d-46c3-951e-15087f60a7c9, gen: 1 } is now active (read-write)
13270 level"
13271 :30{"Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:54296, task: main
13272 msg":"[2] connecting to 127.0.0.1:36502","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:21:37.700383427Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13273 ,"time":"{2023-09-22T23:21:37.700403744Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2"[1] 1c48f237-34b8-4484-b38b-7c6b80300cc8 (126b8002-1d8d-46c3-951e-15087f60a7c9) WaitActive New New ds_transition to WaitActive"}
13274 ,"v":0,"name":"{crucible","level":"30msg":"up_listen starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.700443433Z","hostname":","time":ip-10-150-1-55.us-west-2.compute.internal"","pid":2023-09-22T23:21:37.700450785Z"4769,"hostname"}:
13275 "ip-10-150-1-55.us-west-2.compute.internal","pid":{4769,"task":""up_listen"msg"}:
13276 "[1] Transition from New to WaitActive","v":{Sep 22 23:21:37.700 INFO accepted connection from 127.0.0.1:53816, task: main
13277 0"msg",:""name":"crucible",Wait for all three downstairs to come online""level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.700511179Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid2023-09-22T23:21:37.700516003Z"":,4769"hostname":}"
13278 Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: 1c48f237-34b8-4484-b38b-7c6b80300cc8, session_id: 126b8002-1d8d-46c3-951e-15087f60a7c9, gen: 1 } is now active (read-write)
13279 ip-10-150-1-55.us-west-2.compute.internal"{,""pid"msg"::4769"}
13280 [1] client is_active_req TRUE, promote! session 126b8002-1d8d-46c3-951e-15087f60a7c9","v":0,"name"{:"crucible","level":"30msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.700566161Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4769"time"}:"
13281 2023-09-22T23:21:37.700573835Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4769"msg}"
13282 :"{[2] 1c48f237-34b8-4484-b38b-7c6b80300cc8 (126b8002-1d8d-46c3-951e-15087f60a7c9) WaitActive WaitActive New ds_transition to WaitActive""msg",:""v":0,"name":"8f69534d-528b-4c23-b2c9-ce2dfe832ae1 active request set"crucible",,""v"level"::030,"name":"crucible","level":30Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: 8e88d9c1-84f3-4981-affe-0c1310d04226, session_id: d738ea25-1afc-4d4c-9b80-99c2ada34ff7, gen: 1 } is now active (read-write)
13283 ,",time":""time":"2023-09-22T23:21:37.700633873Z"2023-09-22T23:21:37.700637429Z",",hostname"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"":,"4769pid":4769}
13284 }
13285 {"{msg":"[2] Transition from New to WaitActive""msg":,""v":0,"name":"[0] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 looper connected"crucible",,""v"level"::030,"name":"crucible","level":30looper":"2"}
13286 ,"time":","time2023-09-22T23:21:37.700695573Z"":",{"2023-09-22T23:21:37.700699128Z",{"hostname",msg""time":msghostname"""::":"""ip-10-150-1-55.us-west-2.compute.internalup_listen starts:"ip-10-150-1-55.us-west-2.compute.internal",","""2023-09-22T23:21:37.700127898Zpid""",pid":"4769,[0] 8e88d9c1-84f3-4981-affe-0c1310d04226 (d738ea25-1afc-4d4c-9b80-99c2ada34ff7) New New New ds_transition to WaitActive:"4769,""},vlooper
13287 "":vhostname""::"{"0"0"ip-10-150-1-55.us-west-2.compute.internal,:}"0",""msg",Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: 8e88d9c1-84f3-4981-affe-0c1310d04226, session_id: d738ea25-1afc-4d4c-9b80-99c2ada34ff7, gen: 1 } is now active (read-write)
13288 :
13289 namename""pid{":""::"4769crucible"""crucible,",[2] client is_active_req TRUE, promote! session 126b8002-1d8d-46c3-951e-15087f60a7c9msg,""":looper","":v"level""level"1:"":30:"0[0] Proc runs for 127.0.0.1:34554 in state New",}30
13290 ","name"v":":crucible"0{,,""name"level"::""crucible"msg,,,""level":""time30"time"::""":2023-09-22T23:21:37.70081188Z2023-09-22T23:21:37.700816504Z""30,,""[1] Proc runs for 127.0.0.1:64794 in state Newhostnamehostname""::""","v"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"":,,""pid"pid:"4769:04769,,"",name"":tasktime"":crucible","""}:
13291 "time"up_listen":,2023-09-22T23:21:37.700838998Z"""{}level2023-09-22T23:21:37.700844571Z"
13292 ",msg,{"""":msg""::30hostname"hostname"":Sep 22 23:21:37.700 INFO UpstairsConnection { upstairs_id: 8e88d9c1-84f3-4981-affe-0c1310d04226, session_id: d738ea25-1afc-4d4c-9b80-99c2ada34ff7, gen: 1 } is now active (read-write)
13293 "ip-10-150-1-55.us-west-2.compute.internal":Wait for all three downstairs to come online"",",[0] Transition from New to WaitActive""v",:"0v,ip-10-150-1-55.us-west-2.compute.internal""",pid"time:","":name04769"pid"::"}",:""name"crucible:""
13294 2023-09-22T23:21:37.700892003Z"crucible,"",level,""4769hostname":"30:level"}:"
13295 ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4769,"{time"}:
13296 "",msg":"2023-09-22T23:21:37.700940437Z""time",:""hostname{"2023-09-22T23:21:37.700948898Z:""[1] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 looper connected","ip-10-150-1-55.us-west-2.compute.internal"msghostname"",":""pid:,""ip-10-150-1-55.us-west-2.compute.internal"v:"4769[2] 82efa4d5-f84d-4765-a042-3cbb8c544041 looper connected"},
13297 "",pid{"":v"":msg4769"0}::
13298 "0,,{Flush timeout: 0.5"",""name"vmsg""::"0:,""crucible"[0] client is_active_req TRUE, promote! session d738ea25-1afc-4d4c-9b80-99c2ada34ff7name"",name,""::"""cruciblev""level""crucible":,0:,""levelname""30,::"30"crucible"level",:"30level":30,"time":"2023-09-22T23:21:37.701020877Z","hostname,"":"time":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:37.701025631Z"",","pid",:hostname4769":"}"
13299 ip-10-150-1-55.us-west-2.compute.internal,time{""":time"",""msg"pid:2023-09-22T23:21:37.70101971Z"""::"4769,2023-09-22T23:21:37.70102499Z}""dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 active request set
13300 "hostname,""{v,:""""msg:"ip-10-150-1-55.us-west-2.compute.internal":hostname",{"":pid"":0",4769msg"ip-10-150-1-55.us-west-2.compute.internal",",name[1] 8e88d9c1-84f3-4981-affe-0c1310d04226 (d738ea25-1afc-4d4c-9b80-99c2ada34ff7) WaitActive New New ds_transition to WaitActive"":"",crucible"""pid,v"looper::4769"":"0level,",:"30:name"":"""looper"2":[0] downstairs client at 127.0.0.1:46967 has UUID ff162497-e5a3-4763-b129-29dec4869786""}crucible
13301 ,"",time"1level,""""}{v::"
13302 302023-09-22T23:21:37.701106078Z""",msg":":"hostname"{0:[2] Proc runs for 127.0.0.1:43168 in state New"",",""timeip-10-150-1-55.us-west-2.compute.internal",msg"":,v""pid":2023-09-22T23:21:37.701135053Z"""":name"0:,:",[1] Proc runs for 127.0.0.1:59903 in state New"hostnamename""4769,":"":"v"crucible"ip-10-150-1-55.us-west-2.compute.internalcrucible",,"":"}0level
13303 ,,"{"""pid"msg"level"::4769Sep 22 23:21:37.701 INFO Connection request from 7be22e7c-d55a-4790-a835-081c096012f4 with version 4, task: proc
13304 name"":}"
13305 "::30crucible"30[0] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 looper connected{",",""msgv""::"0level,""[1] Transition from New to WaitActivename"",:"":vcrucible"":300,",level""Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 7be22e7c-d55a-4790-a835-081c096012f4, session_id: 8fe9d3ab-892b-48e5-a656-2b4564d918d0, gen: 1 } connected, version 4, task: proc
13306 :name30":",,""crucible"time"time,":"":,2023-09-22T23:21:37.701206725Z"level,",:"30time""":time"""hostname:""2023-09-22T23:21:37.701208023Z"2023-09-22T23:21:37.701224995Z"2023-09-22T23:21:37.701218764Z",,,,:""time""""hostnamehostname":"":ip-10-150-1-55.us-west-2.compute.internal":2023-09-22T23:21:37.701240522Z""",,ip-10-150-1-55.us-west-2.compute.internal""hostname"hostnamepid"":ip-10-150-1-55.us-west-2.compute.internal""4769,:,:}""""pid"
13307 ip-10-150-1-55.us-west-2.compute.internal":ip-10-150-1-55.us-west-2.compute.internalpid"",:4769,4769""pidpid,"}"
13308 :":4769looper4769":"}0
13309 "{}}{
13310 ""
13311 msg"msg":{"{:"Sep 22 23:21:37.701 INFO Connection request from 7be22e7c-d55a-4790-a835-081c096012f4 with version 4, task: proc
13312 [2] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 looper connected""[1] client is_active_req TRUE, promote! session d738ea25-1afc-4d4c-9b80-99c2ada34ff7"msg"msg"":,"",v:[0] Proc runs for 127.0.0.1:63497 in state New""":"{,Sep 22 23:21:37.699 INFO Connection request from 82efa4d5-f84d-4765-a042-3cbb8c544041 with version 4, task: proc
13313 ""v"msg:"0:,[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ff162497-e5a3-4763-b129-29dec4869786, encrypted: true, database_read_version: 1, database_write_version: 1 }"""name":"[1] connecting to 127.0.0.1:38174"crucible",,"",v"":v"0:level",:0"30name":","crucible"name",:""level":crucible"30,"level":30,Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 82efa4d5-f84d-4765-a042-3cbb8c544041, session_id: 13fb2768-edc0-4158-a955-c73279bce790, gen: 1 } connected, version 4, task: proc
13314 "time":"2023-09-22T23:21:37.701382591Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:21:37.701389311Z"4769Sep 22 23:21:37.701 INFO [0] downstairs client at 127.0.0.1:46295 has UUID ca34832d-be06-44a7-9364-f6c3bc44d7fb
13315 ,"},
13316 hostname""time{0:"""msg,:"ip-10-150-1-55.us-west-2.compute.internal""":"name,2023-09-22T23:21:37.701392161Z"""[1] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 looper connected,"pid","":v""Sep 22 23:21:37.701 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ca34832d-be06-44a7-9364-f6c3bc44d7fb, encrypted: true, database_read_version: 1, database_write_version: 1 }
13317 :4769:crucible"hostname","0level",:,:"30""name":"looper"crucible":,ip-10-150-1-55.us-west-2.compute.internal""1"",level"":pid30":}4769
13318 }
13319 ,{"time":""2023-09-22T23:21:37.701453921Z"{msg":,",""hostname""msg:[2] connecting to 127.0.0.1:34655time":"":"""ip-10-150-1-55.us-west-2.compute.internal,"2023-09-22T23:21:37.701465019Z"1c48f237-34b8-4484-b38b-7c6b80300cc8 WaitActive WaitActive WaitActivev,""Sep 22 23:21:37.701 INFO f712941d-0577-43e4-8a2e-7814ec270c09 WaitActive WaitActive WaitActive
13320 ":Sep 22 23:21:37.701 INFO Connection request from 82efa4d5-f84d-4765-a042-3cbb8c544041 with version 4, task: proc
13321 0,v":0,"name":"crucible","level":30Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 82efa4d5-f84d-4765-a042-3cbb8c544041, session_id: 13fb2768-edc0-4158-a955-c73279bce790, gen: 1 } connected, version 4, task: proc
13322 "name":"crucible","level":30Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d, session_id: 412fdf38-1b7f-416c-b371-8bd0b7f86f94, gen: 1 } connected, version 4, task: proc
13323 ,"time":"2023-09-22T23:21:37.701536164Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,,""looper":"v2"",}:"
13324 time"Sep 22 23:21:37.701 INFO Connection request from dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 with version 4, task: proc
13325 0,:"{"Sep 22 23:21:37.701 INFO [1] downstairs client at 127.0.0.1:43385 has UUID f7971d88-fcac-4a34-864e-16881c021a65
13326 2023-09-22T23:21:37.701550767Z"name"",:"msg"hostname:"":""crucible"[2] Proc runs for 127.0.0.1:36502 in state New","ip-10-150-1-55.us-west-2.compute.internal",level",v"":Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9, session_id: 9165874c-5197-4919-861e-2430574d8700, gen: 1 } connected, version 4, task: proc
13327 "pid"::Sep 22 23:21:37.701 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f7971d88-fcac-4a34-864e-16881c021a65, encrypted: true, database_read_version: 1, database_write_version: 1 }
13328 4769,30"0looper",:""name"2":"}crucible"
13329 ,"level":30Sep 22 23:21:37.701 INFO Connection request from 82efa4d5-f84d-4765-a042-3cbb8c544041 with version 4, task: proc
13330 ,{"Sep 22 23:21:37.701 INFO Connection request from 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d with version 4, task: proc
13331 time""msg"::""up_listen starts"2023-09-22T23:21:37.701617566Z",,","v""hostname"time":::""0,"2023-09-22T23:21:37.701631993Z"ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 82efa4d5-f84d-4765-a042-3cbb8c544041, session_id: 13fb2768-edc0-4158-a955-c73279bce790, gen: 1 } connected, version 4, task: proc
13332 ,nameSep 22 23:21:37.701 INFO f712941d-0577-43e4-8a2e-7814ec270c09 WaitActive WaitActive WaitActive
13333 ":Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d, session_id: 412fdf38-1b7f-416c-b371-8bd0b7f86f94, gen: 1 } connected, version 4, task: proc
13334 ",pid"""crucible"hostname"::","4769level":ip-10-150-1-55.us-west-2.compute.internal"30},
13335 "pid":4769}
13336 {,""time"msg"::""2023-09-22T23:21:37.701686899Z","hostname":[1] downstairs client at 127.0.0.1:55221 has UUID d803d148-f26b-4671-a1b7-af69788d7c3f"","v":Sep 22 23:21:37.701 INFO Connection request from dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 with version 4, task: proc
13337 0,ip-10-150-1-55.us-west-2.compute.internal"","pid":4769,"task":"name"up_listen":}"
13338 crucible","pidlevel"":{:304769"Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9, session_id: 9165874c-5197-4919-861e-2430574d8700, gen: 1 } connected, version 4, task: proc
13339 msg":"Sep 22 23:21:37.701 INFO [2] downstairs client at 127.0.0.1:64157 has UUID 4afe10fd-86f8-4da0-8ac1-1f760ded452a
13340 Wait for all three downstairs to come online}",,""timev
13341 ""::"0,"{2023-09-22T23:21:37.701737215Z"name":","crucible"hostname",":""levelmsg"":ip-10-150-1-55.us-west-2.compute.internal":"30Sep 22 23:21:37.701 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4afe10fd-86f8-4da0-8ac1-1f760ded452a, encrypted: true, database_read_version: 1, database_write_version: 1 }
13342 ,"pid":4769[2] 8e88d9c1-84f3-4981-affe-0c1310d04226 (d738ea25-1afc-4d4c-9b80-99c2ada34ff7) WaitActive WaitActive New ds_transition to WaitActive"}
13343 ,"v":0,"name":"{crucible",,""time"level":":"30msg"2023-09-22T23:21:37.701774668Z":","Sep 22 23:21:37.701 INFO Current flush_numbers [0..12]: [0, 0]
13344 [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d803d148-f26b-4671-a1b7-af69788d7c3f, encrypted: true, database_read_version: 1, database_write_version: 1 }"hostname,"":v",:"0"time",":ip-10-150-1-55.us-west-2.compute.internal"name"":,{"""{msg":"[0] 82efa4d5-f84d-4765-a042-3cbb8c544041 (13fb2768-edc0-4158-a955-c73279bce790) New New New ds_transition to WaitActive","v":0,""name":"cruciblecrucible"","level",:"30level":30"msg":"[0] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d (412fdf38-1b7f-416c-b371-8bd0b7f86f94) New New New ds_transition to WaitActive","v":0,"name":"crucible",",level"",:time"":"time30"2023-09-22T23:21:37.701907289Z":","hostname"2023-09-22T23:21:37.701908717Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4769","pid"}:
13345 4769,"time":"}
13346 Sep 22 23:21:37.701 INFO Current flush_numbers [0..12]: [0, 0]
13347 2023-09-22T23:21:37.701933626Z"{{,"hostname":"""ip-10-150-1-55.us-west-2.compute.internal"msg"msg,":pid"""::"4769[0] Transition from New to WaitActive","}v1c48f237-34b8-4484-b38b-7c6b80300cc8 WaitActive WaitActive WaitActive
13348 "":,0",v"{"name"::"0crucible"",,""msgname""level":"::"30crucible","[0] Transition from New to WaitActive"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.701999981Z","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:21:37.702003899Z",pid"","time"::4769"hostname":"}2023-09-22T23:21:37.70200907Z"
13349 ,ip-10-150-1-55.us-west-2.compute.internal"","hostname"pid"::{4769"}
13350 ip-10-150-1-55.us-west-2.compute.internal"","msg"pid"::"4769}
13351 [0] client is_active_req TRUE, promote! session 13fb2768-edc0-4158-a955-c73279bce790","v":0,"{name":"crucible","level"":msg"30:"[0] client is_active_req TRUE, promote! session 412fdf38-1b7f-416c-b371-8bd0b7f86f94","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.702 INFO Downstairs has completed Negotiation, task: proc
13352 2023-09-22T23:21:37.702067403Z","hostname":"Sep 22 23:21:37.701 INFO upstairs UpstairsConnection { upstairs_id: 7be22e7c-d55a-4790-a835-081c096012f4, session_id: 8fe9d3ab-892b-48e5-a656-2b4564d918d0, gen: 1 } connected, version 4, task: proc
13353 ip-10-150-1-55.us-west-2.compute.internal",,""time"pid"::"4769}2023-09-22T23:21:37.702077668Z
13354 ","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4769msg":"}
13355 Sep 22 23:21:37.702 INFO Connection request from 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 with version 4, task: proc
13356 {[1] 82efa4d5-f84d-4765-a042-3cbb8c544041 (13fb2768-edc0-4158-a955-c73279bce790) WaitActive New New ds_transition to WaitActive",""v"msg:"0:,"Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 82efa4d5-f84d-4765-a042-3cbb8c544041, session_id: 13fb2768-edc0-4158-a955-c73279bce790, gen: 1 } is now active (read-write)
13357 "[1] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d (412fdf38-1b7f-416c-b371-8bd0b7f86f94) WaitActive New New ds_transition to WaitActive"name":","crucible"v":,"0level",:"30name":Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d, session_id: 412fdf38-1b7f-416c-b371-8bd0b7f86f94, gen: 1 } is now active (read-write)
13358 "crucible"Sep 22 23:21:37.702 INFO upstairs UpstairsConnection { upstairs_id: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1, session_id: 4c909eb0-0138-456e-a20b-76c3a0a91bc8, gen: 1 } connected, version 4, task: proc
13359 ,Sep 22 23:21:37.702 INFO Downstairs has completed Negotiation, task: proc
13360 ",level"":time"30:"2023-09-22T23:21:37.702146908Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13361 ,"{time":""2023-09-22T23:21:37.702171816Z"msg":","hostname":"[1] Transition from New to WaitActive","v"ip-10-150-1-55.us-west-2.compute.internal":,"0pid,""name"::4769"crucible"},
13362 "Sep 22 23:21:37.702 INFO Connection request from 7be22e7c-d55a-4790-a835-081c096012f4 with version 4, task: proc
13363 level{":"30msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,",time"":"hostname"2023-09-22T23:21:37.702216752Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",ip-10-150-1-55.us-west-2.compute.internaltime""":,""pidpid""Sep 22 23:21:37.702 INFO upstairs UpstairsConnection { upstairs_id: 7be22e7c-d55a-4790-a835-081c096012f4, session_id: 8fe9d3ab-892b-48e5-a656-2b4564d918d0, gen: 1 } connected, version 4, task: proc
13364 2023-09-22T23:21:37.702225169Z"::4769,}Sep 22 23:21:37.702 INFO Connection request from 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 with version 4, task: proc
13365 
13366 "Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d, session_id: 412fdf38-1b7f-416c-b371-8bd0b7f86f94, gen: 1 } is now active (read-write)
13367 hostname"{:""msg":"[1] client is_active_req TRUE, promote! session 13fb2768-edc0-4158-a955-c73279bce790","v":ip-10-150-1-55.us-west-2.compute.internal"0,,""name"pid"::"4769crucible"Sep 22 23:21:37.702 INFO upstairs UpstairsConnection { upstairs_id: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1, session_id: 4c909eb0-0138-456e-a20b-76c3a0a91bc8, gen: 1 } connected, version 4, task: proc
13368 },
13369 "level":30{"msg":"[1] client is_active_req TRUE, promote! session 412fdf38-1b7f-416c-b371-8bd0b7f86f94","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 82efa4d5-f84d-4765-a042-3cbb8c544041, session_id: 13fb2768-edc0-4158-a955-c73279bce790, gen: 1 } is now active (read-write)
13370 2023-09-22T23:21:37.702304478Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal,"","timepid""::"47692023-09-22T23:21:37.702319444Z"}
13371 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4769"}msg"
13372 :"{"msg":"[2] 82efa4d5-f84d-4765-a042-3cbb8c544041 (13fb2768-edc0-4158-a955-c73279bce790) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible"[2] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d (412fdf38-1b7f-416c-b371-8bd0b7f86f94) WaitActive WaitActive New ds_transition to WaitActive",",level"":v"30:0,"name":"crucible","level":30Sep 22 23:21:37.702 INFO Current flush_numbers [0..12]: [0, 0]
13373 ,"Sep 22 23:21:37.702 INFO Connection request from 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 with version 4, task: proc
13374 ,"{time"time"::"""2023-09-22T23:21:37.702379623Z"2023-09-22T23:21:37.702375019Z"msg",,:"""hostname"hostname"::""[2] downstairs client at 127.0.0.1:59938 has UUID 74dd6474-4427-416d-8d80-1f3912278411"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",,"",pid":"4769v"pid"}:
13375 4769:Sep 22 23:21:37.702 INFO upstairs UpstairsConnection { upstairs_id: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1, session_id: 4c909eb0-0138-456e-a20b-76c3a0a91bc8, gen: 1 } connected, version 4, task: proc
13376 0,}"{name":
13377 ""cruciblemsg"":"{,"level":[2] Transition from New to WaitActive"30",Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d, session_id: 412fdf38-1b7f-416c-b371-8bd0b7f86f94, gen: 1 } is now active (read-write)
13378 Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 82efa4d5-f84d-4765-a042-3cbb8c544041, session_id: 13fb2768-edc0-4158-a955-c73279bce790, gen: 1 } is now active (read-write)
13379 "msg":v"":0,"[2] Transition from New to WaitActive"name":","crucible"v":,,""level"0:time"30:,""name":"crucible"2023-09-22T23:21:37.702441849Z","level",":hostname"30:"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13380 ,"time":"2023-09-22T23:21:37.702465849Z",{"hostname":",""timemsg""ip-10-150-1-55.us-west-2.compute.internal":,"":"pid2023-09-22T23:21:37.702472599Z"":4769,"hostname":"}
13381 ip-10-150-1-55.us-west-2.compute.internal","[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 74dd6474-4427-416d-8d80-1f3912278411, encrypted: true, database_read_version: 1, database_write_version: 1 }"pid":4769{,"v"}:
13382 0","msg"name"::"{"crucible","level":"[2] client is_active_req TRUE, promote! session 13fb2768-edc0-4158-a955-c73279bce790msg"30",":v"":0,"name":"crucible"[2] client is_active_req TRUE, promote! session 412fdf38-1b7f-416c-b371-8bd0b7f86f94",","level"v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.702524536Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"time"}:"
13383 ,Sep 22 23:21:37.702 INFO Downstairs has completed Negotiation, task: proc
13384 "{time"msg":"":"1c48f237-34b8-4484-b38b-7c6b80300cc8 WaitActive WaitActive WaitActive","2023-09-22T23:21:37.702539716Zv"":,"hostname":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:21:37.702535045Z0,"",pid",:"4769"hostname"name"::""}crucible"
13385 Sep 22 23:21:37.702 INFO Current flush_numbers [0..12]: [0, 0]
13386 ip-10-150-1-55.us-west-2.compute.internal",",level"":pid"30:4769}
13387 ,"time":"2023-09-22T23:21:37.702611438Z","hostname":"2023-09-22T23:21:37.701795204Z"ip-10-150-1-55.us-west-2.compute.internal",",pid"":hostname"4769:"}
13388 ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4769}
13389 "msg":"{"msg"[0] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 (4c909eb0-0138-456e-a20b-76c3a0a91bc8) New New New ds_transition to WaitActive:"","v"[2] Transition from New to WaitActive":,"0v",":name"0:","crucible"name":","crucible"level":,"30level":30,"time":","2023-09-22T23:21:37.702676545Z"time":","hostname":2023-09-22T23:21:37.70267779Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"pid":}4769
13390 }
13391 {"msg"{:"[0] Transition from New to WaitActive"",msg"":v"":0,"name":"crucible"[2] client is_active_req TRUE, promote! session d738ea25-1afc-4d4c-9b80-99c2ada34ff7",","level"v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.702734136Z","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:21:37.702738319Z"pid":4769,"hostname":}"
13392 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}{
13393 "msg":"{[0] client is_active_req TRUE, promote! session 4c909eb0-0138-456e-a20b-76c3a0a91bc8"",msg":""v":0,"name":"crucible"[0] downstairs client at 127.0.0.1:36647 has UUID b778f7e5-519c-44d5-90ea-a92dbeffe813",",level"":v":Sep 22 23:21:37.702 INFO Downstairs has completed Negotiation, task: proc
13394 30Sep 22 23:21:37.702 INFO Current flush_numbers [0..12]: [0, 0]
13395 0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.702794285Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13396 ,"{time":""2023-09-22T23:21:37.702806229Z"msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769[1] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 (4c909eb0-0138-456e-a20b-76c3a0a91bc8) WaitActive New New ds_transition to WaitActive",}"
13397 v":0,"name":"crucible"{,"level":30"msg":"[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b778f7e5-519c-44d5-90ea-a92dbeffe813, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,",name"":"time":crucible"","level":2023-09-22T23:21:37.702842373Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13398 pid{"msg":4769}
13399 {{"msg":""msgFlush timeout: 0.5"":","v":0,"Sep 22 23:21:37.702 INFO Downstairs has completed Negotiation, task: proc
13400 name"[0] downstairs client at 127.0.0.1:43936 has UUID 2ad3fd00-b9f7-450b-935d-a7c44c290e9e":"crucible",","vlevel""::30"0:","name":"crucible"[1] Transition from New to WaitActive","level",":v":300,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.702995113Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13401 ,,""time":time{"":"2023-09-22T23:21:37.703007472Z""2023-09-22T23:21:37.703002874Z","msg"hostname,":"":hostname"":"7be22e7c-d55a-4790-a835-081c096012f4 active request set","ip-10-150-1-55.us-west-2.compute.internalv"":ip-10-150-1-55.us-west-2.compute.internal0",,""name,":""pid"pid"crucible:"4769:,"4769level"}:}30
13402 
13403 {Sep 22 23:21:37.703 INFO UpstairsConnection { upstairs_id: 7be22e7c-d55a-4790-a835-081c096012f4, session_id: 8fe9d3ab-892b-48e5-a656-2b4564d918d0, gen: 1 } is now active (read-write)
13404 "msg",:"{"time":"2023-09-22T23:21:37.703058195Z""msg":","hostname":"ip-10-150-1-55.us-west-2.compute.internal"[1] client is_active_req TRUE, promote! session 4c909eb0-0138-456e-a20b-76c3a0a91bc8[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2ad3fd00-b9f7-450b-935d-a7c44c290e9e, encrypted: true, database_read_version: 1, database_write_version: 1 }","",pid"",v":"4769:v"0},":
13405 0name",:""name"crucible":{","crucible"level":,""30level"msg:":30"[0] 7be22e7c-d55a-4790-a835-081c096012f4 looper connected","v":0,"name":"crucible","level":30,",time"":"time":"2023-09-22T23:21:37.703117282Z"2023-09-22T23:21:37.703114711Z",Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13406 "hostname",,:"""time"hostname"::ip-10-150-1-55.us-west-2.compute.internal""","pid"2023-09-22T23:21:37.703125675Zip-10-150-1-55.us-west-2.compute.internal:""4769,,""pid"hostname"}:
13407 :"4769Sep 22 23:21:37.701 INFO f712941d-0577-43e4-8a2e-7814ec270c09 WaitActive WaitActive WaitActive
13408 ip-10-150-1-55.us-west-2.compute.internal",}{
13409 ""pid":msg"4769:",{"looper":"0"}
13410 [2] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 (4c909eb0-0138-456e-a20b-76c3a0a91bc8) WaitActive WaitActive New ds_transition to WaitActive""msg,"":v":{"0,"name":""crucible"msg"69c4c5e1-2fd5-4777-bf6a-fcd75618f55d WaitActive WaitActive WaitActive",:","level"":v[0] Proc runs for 127.0.0.1:54884 in state New""30,":v"0:,"0name",:""Sep 22 23:21:37.703 INFO UpstairsConnection { upstairs_id: 7be22e7c-d55a-4790-a835-081c096012f4, session_id: 8fe9d3ab-892b-48e5-a656-2b4564d918d0, gen: 1 } is now active (read-write)
13411 crucible"name":","crucible"level",:"30level":30,"time":"2023-09-22T23:21:37.703208937Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13412 ,",{time":""time""2023-09-22T23:21:37.703225997Z:""msg",":"2023-09-22T23:21:37.703224696Z"hostname":",[2] Transition from New to WaitActive""ip-10-150-1-55.us-west-2.compute.internal"hostname,,"""v"::pid"":04769,"ip-10-150-1-55.us-west-2.compute.internal"name,}"
13413 :""pid"crucible":,4769"Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13414 {level"Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13415 }:
13416 "30msg":"{[1] 7be22e7c-d55a-4790-a835-081c096012f4 looper connected","v":0","msg"name"::""crucible","level":30[1] downstairs client at 127.0.0.1:39320 has UUID ab7c04fa-de73-4af6-8f95-d0dc6939a823","v":,0","timename""::""crucible"2023-09-22T23:21:37.703295275Z,"Sep 22 23:21:37.703 INFO Downstairs has completed Negotiation, task: proc
13417 ,""hostname",":"level"time"::Sep 22 23:21:37.703 INFO UpstairsConnection { upstairs_id: 7be22e7c-d55a-4790-a835-081c096012f4, session_id: 8fe9d3ab-892b-48e5-a656-2b4564d918d0, gen: 1 } is now active (read-write)
13418 30"ip-10-150-1-55.us-west-2.compute.internal","2023-09-22T23:21:37.70330854Z"pid":,4769"hostname":"}
13419 ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"{looper":",1"""}msg"
13420 :time"":"Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13421 {[2] client is_active_req TRUE, promote! session 4c909eb0-0138-456e-a20b-76c3a0a91bc8"2023-09-22T23:21:37.703333876Z"",msg"":,v""":hostname"0:[1] Proc runs for 127.0.0.1:38174 in state New","","name"vip-10-150-1-55.us-west-2.compute.internal:""",crucible":",0"pidlevel","::30"4769name":"crucible}"
13422 ,"level":30{"msg":,""time":"2023-09-22T23:21:37.703389033Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ab7c04fa-de73-4af6-8f95-d0dc6939a823, encrypted: true, database_read_version: 1, database_write_version: 1 }",}","
13423 time"v:"":02023-09-22T23:21:37.703398721Z","name":,""hostname":crucible"","level":ip-10-150-1-55.us-west-2.compute.internal"30,"pid":4769}
13424 {"msg":"[2] 7be22e7c-d55a-4790-a835-081c096012f4 looper connected","v":0,",name"":"time"crucible":,""level":2023-09-22T23:21:37.70343473Z"30,"hostname":"Sep 22 23:21:37.703 INFO Downstairs has completed Negotiation, task: proc
13425 Sep 22 23:21:37.703 INFO Downstairs has completed Negotiation, task: proc
13426 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13427 ,"time":"2023-09-22T23:21:37.703460464Z"{,"hostname":""msg"ip-10-150-1-55.us-west-2.compute.internal":","pid":4769,"69c4c5e1-2fd5-4777-bf6a-fcd75618f55d WaitActive WaitActive WaitActive"looper":",2""v"}:
13428 0,"name":"crucible"{,"level":30"msg":"[2] Proc runs for 127.0.0.1:34655 in state New","v":0,"name":"crucible","level":30{,"time":"2023-09-22T23:21:37.703505405Z"","msghostname""::""ip-10-150-1-55.us-west-2.compute.internal",","timepid""::Sep 22 23:21:37.702 INFO UpstairsConnection { upstairs_id: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1, session_id: 4c909eb0-0138-456e-a20b-76c3a0a91bc8, gen: 1 } is now active (read-write)
13429 "[0] 1c48f237-34b8-4484-b38b-7c6b80300cc8 (126b8002-1d8d-46c3-951e-15087f60a7c9) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"2023-09-22T23:21:37.703514929ZSep 22 23:21:37.703 INFO Downstairs has completed Negotiation, task: proc
13430 ",,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13431 {"msg":"[0] 7be22e7c-d55a-4790-a835-081c096012f4 (8fe9d3ab-892b-48e5-a656-2b4564d918d0) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.703580717Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","Sep 22 23:21:37.701 INFO Connection request from dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 with version 4, task: proc
13432 pid,"time":"2023-09-22T23:21:37.703591587Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13433 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30":4769Sep 22 23:21:37.703 INFO upstairs UpstairsConnection { upstairs_id: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9, session_id: 9165874c-5197-4919-861e-2430574d8700, gen: 1 } connected, version 4, task: proc
13434 }
13435 ,{"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30Sep 22 23:21:37.703 INFO UpstairsConnection { upstairs_id: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1, session_id: 4c909eb0-0138-456e-a20b-76c3a0a91bc8, gen: 1 } is now active (read-write)
13436 ,"time"":"time":"2023-09-22T23:21:37.70366437Z",2023-09-22T23:21:37.703638612Z""hostname":","hostname":Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13437 ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13438 ","ip-10-150-1-55.us-west-2.compute.internalpid":4769}
13439 {"msg":"[0] client is_active_req TRUE, promote! session 8fe9d3ab-892b-48e5-a656-2b4564d918d0","v":0,"name":"crucible","level":30","pid":4769}
13440 ,{""msg":"[0] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"time":"2023-09-22T23:21:37.703754703Z",2023-09-22T23:21:37.703725413Z""hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pidip-10-150-1-55.us-west-2.compute.internal"":,"4769pid":4769}
13441 }
13442 Sep 22 23:21:37.703 INFO UpstairsConnection { upstairs_id: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1, session_id: 4c909eb0-0138-456e-a20b-76c3a0a91bc8, gen: 1 } is now active (read-write)
13443 {"msg":"[1] 7be22e7c-d55a-4790-a835-081c096012f4 (8fe9d3ab-892b-48e5-a656-2b4564d918d0) WaitActive New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,Sep 22 23:21:37.703 INFO Current flush_numbers [0..12]: [0, 0]
13444 "time":"2023-09-22T23:21:37.703815115Z","hostname":"4769ip-10-150-1-55.us-west-2.compute.internal","pid":4769}}
13445 
13446 {{""msgmsg"Sep 22 23:21:37.703 INFO Downstairs has completed Negotiation, task: proc
13447 Sep 22 23:21:37.703 INFO Downstairs has completed Negotiation, task: proc
13448 "::""[1] Transition from New to WaitActive","v":[2] downstairs client at 127.0.0.1:57826 has UUID 440ce80c-c0c9-48b9-ba41-98710683ab87"0,"name",:""v"crucible":,"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:21:37.703883983Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time,""pid":":47692023-09-22T23:21:37.703887713Z"}
13449 ,"hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":4769"msg":}"
13450 [1] client is_active_req TRUE, promote! session 8fe9d3ab-892b-48e5-a656-2b4564d918d0","v":0,"name"{:"crucible","level":"30msg":"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 440ce80c-c0c9-48b9-ba41-98710683ab87, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level",":time":"302023-09-22T23:21:37.703936771Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13451 {,""msg":"time":"2023-09-22T23:21:37.70395514Z","hostname":"[2] 7be22e7c-d55a-4790-a835-081c096012f4 (8fe9d3ab-892b-48e5-a656-2b4564d918d0) WaitActive WaitActive New ds_transition to WaitActive"ip-10-150-1-55.us-west-2.compute.internal",",v"":pid"0:,4769"name":"}
13452 crucible","level":30{"msg":"69c4c5e1-2fd5-4777-bf6a-fcd75618f55d WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.70399868Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13453 Sep 22 23:21:37.704 INFO Downstairs has completed Negotiation, task: proc
13454 ,{"time":""msg":2023-09-22T23:21:37.704012705Z"","[2] Transition from New to WaitActive"hostname":","v":0ip-10-150-1-55.us-west-2.compute.internal",","namepid":"4769:"crucible"},"
13455 level":30,"time":"2023-09-22T23:21:37.704052766Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","Sep 22 23:21:37.704 INFO Current flush_numbers [0..12]: [0, 0]
13456 Sep 22 23:21:37.704 INFO Current flush_numbers [0..12]: [0, 0]
13457 pid":4769}
13458 {"msg":"[2] client is_active_req TRUE, promote! session 8fe9d3ab-892b-48e5-a656-2b4564d918d0","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.704104203Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":Sep 22 23:21:37.704 INFO UpstairsConnection { upstairs_id: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9, session_id: 9165874c-5197-4919-861e-2430574d8700, gen: 1 } is now active (read-write)
13459 4769}
13460 ,"time":"2023-09-22T23:21:37.702862262Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
134612023-09-22T23:21:37.704ZINFOcrucible: 8e88d9c1-84f3-4981-affe-0c1310d04226 WaitActive WaitActive WaitActive
13462 {"msg":"[1] downstairs client at 127.0.0.1:40751 has UUID 4e7888c2-7beb-4f8a-a2f2-dd1592f0ed78","v":0,"name":"crucible","level":30Sep 22 23:21:37.704 INFO Downstairs has completed Negotiation, task: proc
13463 Sep 22 23:21:37.704 INFO Downstairs has completed Negotiation, task: proc
13464 ,Sep 22 23:21:37.704 INFO UpstairsConnection { upstairs_id: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9, session_id: 9165874c-5197-4919-861e-2430574d8700, gen: 1 } is now active (read-write)
13465 Sep 22 23:21:37.704 INFO Current flush_numbers [0..12]: [0, 0]
13466 "time":"2023-09-22T23:21:37.704233977Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
134672023-09-22T23:21:37.704ZINFOcrucible: [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4e7888c2-7beb-4f8a-a2f2-dd1592f0ed78, encrypted: true, database_read_version: 1, database_write_version: 1 }
134682023-09-22T23:21:37.704ZINFOcrucible: 8e88d9c1-84f3-4981-affe-0c1310d04226 WaitActive WaitActive WaitActive
13469 {"msg":"[2] downstairs client at 127.0.0.1:42674 has UUID 7bf7d359-28ba-4346-a3ee-541f5bc5a1c0","v":0,"name":"crucible","level":304769,"looper":"1"}
13470 ,"time":"2023-09-22T23:21:37.704389982Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid":"4769msg":"}
13471 [1] Proc runs for 127.0.0.1:62341 in state New","v":0{,"name":"crucible"",msg":""level":30[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7bf7d359-28ba-4346-a3ee-541f5bc5a1c0, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30Sep 22 23:21:37.704 INFO Downstairs has completed Negotiation, task: proc
13472 ,"time":",2023-09-22T23:21:37.704434541Z""time":","hostname":"2023-09-22T23:21:37.70444842Z","hostname"ip-10-150-1-55.us-west-2.compute.internal":","pid":4769ip-10-150-1-55.us-west-2.compute.internal","}pid
13473 ":4769}
13474 {"{msg":""msg"[2] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 looper connected":","v":0,"name":8e88d9c1-84f3-4981-affe-0c1310d04226 WaitActive WaitActive WaitActive"",crucible""v",:"0level,""name"::"30crucible","level":30,","time":time"":"2023-09-22T23:21:37.704505431Z"2023-09-22T23:21:37.704502787Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",",pid"":pid"4769:4769}
13475 ,"looper":"2"}
13476 {{"{msg"":"msg"":"msg":"[2] Proc runs for 127.0.0.1:51713 in state New",[0] 8e88d9c1-84f3-4981-affe-0c1310d04226 (d738ea25-1afc-4d4c-9b80-99c2ada34ff7) WaitActive WaitActive WaitActive ds_transition to WaitQuorum""v","[0] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d (412fdf38-1b7f-416c-b371-8bd0b7f86f94) WaitActive WaitActive WaitActive ds_transition to WaitQuorumv:""0:,",0name,""v""name":"::crucible"0",,crucible""",level"":name"level"::"3030crucible","level":30,"time":"2023-09-22T23:21:37.704592437Z","hostname":",,""ip-10-150-1-55.us-west-2.compute.internal"timetime"",::""pid"":2023-09-22T23:21:37.704595872Z47692023-09-22T23:21:37.704593086Z""},
13477 ,""hostname"hostname":":"{ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal""",msg""pid"pid:":":47694769[0] Transition from WaitActive to WaitQuorum"}}
13478 
13479 ,Sep 22 23:21:37.704 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
13480 "v{"{:0,""name":"msg"crucible:"",""msg"level"::"[0] Transition from WaitActive to WaitQuorum"Sep 22 23:21:37.704 INFO [0] Transition from WaitActive to WaitQuorum
13481 30Sep 22 23:21:37.704 INFO Current flush_numbers [0..12]: [0, 0]
13482 [0] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 (9165874c-5197-4919-861e-2430574d8700) New New New ds_transition to WaitActive",,""v"v:",0":,"timename":0"",crucible":""nameSep 22 23:21:37.704 WARN [0] new RM replaced this: None
13483 2023-09-22T23:21:37.704680405Z","":level","":crucible"30hostname",:""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13484 {,""msg":time""Sep 22 23:21:37.704 INFO Current flush_numbers [0..12]: [0, 0]
13485 :",[0] new RM replaced this: None""2023-09-22T23:21:37.704714149Z",time,"":""v"hostname2023-09-22T23:21:37.704718329Z""::0",,""nameip-10-150-1-55.us-west-2.compute.internalhostname":""",":pid"ip-10-150-1-55.us-west-2.compute.internal":",4769"crucible"pid"},:
13486 4769"level":}40
13487 {"msg"{:"[0] new RM replaced this: None",""msgv""::"Sep 22 23:21:37.704 INFO [0] Starts reconcile loop
13488 ,"[0] Transition from New to WaitActive"0time,"":"v",:2023-09-22T23:21:37.704770848Z""0name",,":""name"hostname:"crucible"":crucible",",""level"ip-10-150-1-55.us-west-2.compute.internal:"40level",:"30pid":4769}
13489 {"msg":"[0] Starts reconcile loop","v":,"0time",:,"""name"time"2023-09-22T23:21:37.704814844Z::"""crucible",,2023-09-22T23:21:37.704817293Z"""hostname"level:,""":hostname"30ip-10-150-1-55.us-west-2.compute.internal":,""pid":4769ip-10-150-1-55.us-west-2.compute.internal","}pid"
13490 :4769}
13491 ,{"Sep 22 23:21:37.704 INFO Downstairs has completed Negotiation, task: proc
13492 {time""msg"Sep 22 23:21:37.704 INFO [1] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
13493 "::""msg2023-09-22T23:21:37.704852322Z"[0] Starts reconcile loop",,""v"hostname":":0:,"""name":Sep 22 23:21:37.704 INFO [1] Transition from WaitActive to WaitQuorum
13494 ip-10-150-1-55.us-west-2.compute.internal"",[0] client is_active_req TRUE, promote! session 9165874c-5197-4919-861e-2430574d8700""cruciblepid,""":,4769v"":}level
13495 Sep 22 23:21:37.704 WARN [1] new RM replaced this: None
13496 0"{:,30Sep 22 23:21:37.704 INFO Downstairs has completed Negotiation, task: proc
13497 ""msg"name"::""crucible","level":30,"[1] 8e88d9c1-84f3-4981-affe-0c1310d04226 (d738ea25-1afc-4d4c-9b80-99c2ada34ff7) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorumtime"":","v":2023-09-22T23:21:37.704920297Z"0,","name":"hostname":crucibleSep 22 23:21:37.704 INFO [1] Starts reconcile loop
13498 ",""level":30ip-10-150-1-55.us-west-2.compute.internal",,""time"pid"::"47692023-09-22T23:21:37.704933687Z"}
13499 ,"hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal"{,2023-09-22T23:21:37.704950472Z""pid","":hostname"msg"4769::""}
13500 ip-10-150-1-55.us-west-2.compute.internal",[1] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d (412fdf38-1b7f-416c-b371-8bd0b7f86f94) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum""pid",:"4769{v":}0
13501 ,""msg":"name":"{crucible","level":30"msg"[1] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 (9165874c-5197-4919-861e-2430574d8700) WaitActive New New ds_transition to WaitActive":","v":0[1] Transition from WaitActive to WaitQuorum",",name"":v"":crucible0",",name"":"level"crucible":,","level"30time"::"{302023-09-22T23:21:37.705002119Z","hostname":""msg":"Sep 22 23:21:37.705 INFO [2] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
13502 [0] Starts reconcile loop",ip-10-150-1-55.us-west-2.compute.internal,""v"time:":,"0",""2023-09-22T23:21:37.705021001Ztime"name",,"":"hostname:"""Sep 22 23:21:37.705 INFO [2] Transition from WaitActive to WaitQuorum
13503 2023-09-22T23:21:37.705023806Z"crucible:"",pid"",ip-10-150-1-55.us-west-2.compute.internallevel"",:"":pid30":hostname"4769:Sep 22 23:21:37.705 WARN [2] new RM replaced this: None
13504 "ip-10-150-1-55.us-west-2.compute.internal"},4769"
13505 pid},"
13506 :{"4769time":""}msg2023-09-22T23:21:37.705064238Z""{
13507 :,""Sep 22 23:21:37.705 INFO [2] Starts reconcile loop
13508 {[1] Transition from New to WaitActivehostname"""":,"msg"msg:"v"""::ip-10-150-1-55.us-west-2.compute.internal"[1] Transition from WaitActive to WaitQuorum""0,",[1] new RM replaced this: None",pid","":"4769namev""}:
13509 "v:crucible0"{,""":,0msglevel""::",30""name":name"":"crucible"crucible",[1] 1c48f237-34b8-4484-b38b-7c6b80300cc8 (126b8002-1d8d-46c3-951e-15087f60a7c9) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","",levellevel"","v"time:"0:,":""2023-09-22T23:21:37.70513402Zname"":Sep 22 23:21:37.705 INFO Current flush_numbers [0..12]: [0, 0]
13510 30Sep 22 23:21:37.705 INFO [0] 127.0.0.1:46295 task reports connection:true
13511 40:,""cruciblehostname"":,""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13512 ,"time":{"",msg",time""::"""2023-09-22T23:21:37.705157125Z"time":,2023-09-22T23:21:37.705167378Z""[1] client is_active_req TRUE, promote! session 9165874c-5197-4919-861e-2430574d8700",",""Sep 22 23:21:37.705 INFO f712941d-0577-43e4-8a2e-7814ec270c09 WaitQuorum WaitQuorum WaitQuorum
13513 2023-09-22T23:21:37.705162779Z"hostname,vhostname""""::"0:",hostname"ip-10-150-1-55.us-west-2.compute.internal""name:ip-10-150-1-55.us-west-2.compute.internal"",:,""""pid"pidcrucible""ip-10-150-1-55.us-west-2.compute.internal",:"4769:level}4769
13514 "}Sep 22 23:21:37.705 INFO [0]R flush_numbers: [0, 0]
13515 ,"
13516 pid{:"30":msg"4769{:"}
13517 Sep 22 23:21:37.705 INFO [0]R generation: [0, 0]
13518 "[1] Transition from WaitActive to WaitQuorum,""{msg"time,"":v"":"2023-09-22T23:21:37.705231984Z:"msg0,,""""Sep 22 23:21:37.705 INFO [0]R dirty: [false, false]
13519 :"[1] new RM replaced this: None"hostnamename""::"",[1] Starts reconcile loopcrucibleip-10-150-1-55.us-west-2.compute.internal"",,"""pidlevel"":"4769:Sep 22 23:21:37.705 INFO [1]R flush_numbers: [0, 0]
13520 ,v"v""}::
13521 3000,{","name""msg"Sep 22 23:21:37.705 INFO [1]R generation: [0, 0]
13522 name":,:""time"::"""crucible"crucible",2023-09-22T23:21:37.705280215Z,[2] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 (9165874c-5197-4919-861e-2430574d8700) WaitActive WaitActive New ds_transition to WaitActive""",,"Sep 22 23:21:37.705 INFO [1]R dirty: [false, false]
13523 level"""v"hostname:0",:""level"nameip-10-150-1-55.us-west-2.compute.internal::""4030:,""pidcrucible"":4769,"level}"
13524 :Sep 22 23:21:37.705 INFO Downstairs has completed Negotiation, task: proc
13525 Sep 22 23:21:37.705 INFO [2]R flush_numbers: [0, 0]
13526 ,","timetime"{30:""":msg"2023-09-22T23:21:37.705314865Z"":"2023-09-22T23:21:37.705315163ZSep 22 23:21:37.705 INFO [2]R generation: [0, 0]
13527 ",,[1] new RM replaced this: None""time",:"","vhostname"2023-09-22T23:21:37.705335771Z"":",0":Sep 22 23:21:37.705 INFO [2]R dirty: [false, false]
13528 "hostname,"hostname"":name"ip-10-150-1-55.us-west-2.compute.internal":"ip-10-150-1-55.us-west-2.compute.internal"":,"",{cruciblepid"",:"4769"Sep 22 23:21:37.705 INFO Max found gen is 1
13529 pid":}level
13530 ""ip-10-150-1-55.us-west-2.compute.internal{:4769msg"40msg":}"""
13531 Sep 22 23:21:37.705 INFO Generation requested: 1 >= found:1
13532 {,",[2] Transition from New to WaitActive""time,"":msgv:"""""2023-09-22T23:21:37.705387142Z::"0,"",hostnameSep 22 23:21:37.705 INFO Next flush: 1
13533 pid[0] downstairs client at 127.0.0.1:39150 has UUID ea1b6b61-6132-4ae7-8fec-05be62de606d"":name"""[2] 8e88d9c1-84f3-4981-affe-0c1310d04226 (d738ea25-1afc-4d4c-9b80-99c2ada34ff7) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum":ip-10-150-1-55.us-west-2.compute.internal"":",,crucible""",pid""level,v":47690":,Sep 22 23:21:37.705 INFO All extents match
13534 "4769name""}::}v30
13535 
13536 "Sep 22 23:21:37.705 INFO No downstairs repair required
13537 "{{,{"crucible"""",msgtime"""::":level[1] Starts reconcile loop"msgmsg""::"2023-09-22T23:21:37.705449062ZSep 22 23:21:37.705 INFO No initial repair work was required
13538 "":[1] Starts reconcile loop30","","0,hostname,"":v"""[0] downstairs client at 127.0.0.1:54884 has UUID f2d164d9-9cbb-47eb-a838-b7f17a4d0411v""Sep 22 23:21:37.705 INFO Set Downstairs and Upstairs active
13539 ",ip-10-150-1-55.us-west-2.compute.internal",::"0,,""pidname":"4769:0",time""}crucible
13540 ":v"":"0name,name"""name2023-09-22T23:21:37.705483726Z:":"":crucible,,""crucible"{levelSep 22 23:21:37.705 INFO f712941d-0577-43e4-8a2e-7814ec270c09 is now active with session: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae
13541 "hostname",:"":msg30"","""cruciblelevel"",:ip-10-150-1-55.us-west-2.compute.internal""level:",,30"Sep 22 23:21:37.705 INFO f712941d-0577-43e4-8a2e-7814ec270c09 Set Active after no repair
13542 "pid"[2] client is_active_req TRUE, promote! session 9165874c-5197-4919-861e-2430574d8700"leveltime"":"4769::30}"
13543 2023-09-22T23:21:37.705537337Z,"",v"""{Sep 22 23:21:37.705 INFO Notify all downstairs, region set compare is done.
13544 ",:msghostname:"0:,""time"":"time:","""2023-09-22T23:21:37.705549008Z:""[2] Transition from WaitActive to WaitQuorum"nameip-10-150-1-55.us-west-2.compute.internal"",:",30pid""":v"47692023-09-22T23:21:37.705561027Z,}"":
13545 crucible"Sep 22 23:21:37.705 INFO Set check for repair
13546 0,,{"hostname,"""hostname:name":""",crucible""ip-10-150-1-55.us-west-2.compute.internal:"time",:"",""ip-10-150-1-55.us-west-2.compute.internal""pid,"levelpid"":msglevelSep 22 23:21:37.705 INFO [1] 127.0.0.1:43385 task reports connection:true
13547 ":2023-09-22T23:21:37.705590544Z304769:""::30""}4769
13548 }[0] 127.0.0.1:46967 task reports connection:true"
13549 {,,""The guest has finished waiting for activation
13550 ,,Sep 22 23:21:37.705 INFO f712941d-0577-43e4-8a2e-7814ec270c09 Active Active Active
13551 "{time"vtime"""msghostname"msg"":::"""::""":2023-09-22T23:21:37.705638044Z0",2023-09-22T23:21:37.705635209Z",name""hostname:"""crucible:"","ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internallevel"[2] 69c4c5e1-2fd5-4777-bf6a-fcd75618f55d (412fdf38-1b7f-416c-b371-8bd0b7f86f94) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f2d164d9-9cbb-47eb-a838-b7f17a4d0411, encrypted: true, database_read_version: 1, database_write_version: 1 }"",,"":Sep 22 23:21:37.705 INFO Set check for repair
13552 ""30pid","pidhostname"",,""::":4769,vv"""::time}"
13553 :"ip-10-150-1-55.us-west-2.compute.internal"00,,""name",name:"":2023-09-22T23:21:37.70569801Z"crucible""",crucible"pid4769Sep 22 23:21:37.705 INFO [2] 127.0.0.1:64157 task reports connection:true
13554 ""level",,:4769":level30"":}30
13555 hostname"}:"
13556 ip-10-150-1-55.us-west-2.compute.internal"{,,,""time"time:"":"Sep 22 23:21:37.705 INFO f712941d-0577-43e4-8a2e-7814ec270c09 Active Active Active
13557 "msg"2023-09-22T23:21:37.705743687Z:2023-09-22T23:21:37.705740717Z""""Sep 22 23:21:37.704 INFO UpstairsConnection { upstairs_id: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9, session_id: 9165874c-5197-4919-861e-2430574d8700, gen: 1 } is now active (read-write)
13558 pid,,{[2] new RM replaced this: None""":4769hostnamehostname""::"Sep 22 23:21:37.705 INFO Set check for repair
13559 ",}ip-10-150-1-55.us-west-2.compute.internal
13560 "","""ip-10-150-1-55.us-west-2.compute.internalmsg{""pid,"":pid4769""}:
13561 v":msg"::"0{4769,"""}msg
13562 "name"{::1c48f237-34b8-4484-b38b-7c6b80300cc8 WaitQuorum WaitQuorum WaitActive""""msg",":crucible""7be22e7c-d55a-4790-a835-081c096012f4 WaitActive WaitActive WaitActive","v"[2] Transition from WaitActive to WaitQuorum,":,""vv""::00,,""name"[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ea1b6b61-6132-4ae7-8fec-05be62de606d, encrypted: true, database_read_version: 1, database_write_version: 1 }0level:",name"""crucible"":name",:":"crucible""levelcrucible"":40,,30"",levellevel""Sep 22 23:21:37.705 INFO [0] received reconcile message
13563 ,::30",""vtimetime30"":",:"2023-09-22T23:21:37.705856283Z"",time"""hostname:":"":2023-09-22T23:21:37.705855201Z"02023-09-22T23:21:37.705871698Zip-10-150-1-55.us-west-2.compute.internal,,""",name,""pid""hostname:"4769":hostname:}"
13564 ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.705 INFO [0] All repairs completed, exit
13565 ":,""{,crucible""ip-10-150-1-55.us-west-2.compute.internal"pidmsg""::"""time",[2] new RM replaced this: None4769"}pid"
13566 "::,{4769""",msg2023-09-22T23:21:37.705878796Z"v:"":""0}Sep 22 23:21:37.705 INFO [0] Starts cmd_loop
13567 
13568 ,level"[1] downstairs client at 127.0.0.1:38174 has UUID 4b784146-9bfd-4506-a702-f66a5880a2a3,"""name{hostname,"":msg:"""cruciblev:""":,0":",level""30"name:"ip-10-150-1-55.us-west-2.compute.internal40:""[2] Starts reconcile loop",crucible",",""pid"v"level:":0:,,"30time"":name":"4769",2023-09-22T23:21:37.705965403Z"crucible}","",time,"":"hostname""
13569 :2023-09-22T23:21:37.705978977Z""level,"time"::30ip-10-150-1-55.us-west-2.compute.internal"""hostname,"":pid""{:2023-09-22T23:21:37.70596311Zip-10-150-1-55.us-west-2.compute.internal4769"",}"
13570 pid","{:Sep 22 23:21:37.706 INFO [1] received reconcile message
13571 ,""timemsg"4769"msg}:
13572 "hostname:""{:""[2] Starts reconcile loop""Waiting for 1 more clients to be ready,msg""v"":"Sep 22 23:21:37.706 INFO [1] All repairs completed, exit
13573 2023-09-22T23:21:37.706006075Z",:"0,"",vhostname""":"name"::[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4b784146-9bfd-4506-a702-f66a5880a2a3, encrypted: true, database_read_version: 1, database_write_version: 1 }":crucible""0ip-10-150-1-55.us-west-2.compute.internal",",,"ip-10-150-1-55.us-west-2.compute.internal"v",level,""pidname"""::030":Sep 22 23:21:37.706 INFO [1] Starts cmd_loop
13574 "",cruciblepid"",name"":::47694769time""}:crucible"
13575 {{2023-09-22T23:21:37.706079561Z"msg":"","hostname":"[0] downstairs client at 127.0.0.1:34554 has UUID a4638f9d-a209-4281-803d-046dfafa8f18","vip-10-150-1-55.us-west-2.compute.internal"":,0",pid"":name":"4769crucible"},"
13576 levelSep 22 23:21:37.706 INFO [2] received reconcile message
13577 "{:""30msg"msg"::""82efa4d5-f84d-4765-a042-3cbb8c544041 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30Sep 22 23:21:37.706 INFO [2] All repairs completed, exit
13578 ,"time":"2023-09-22T23:21:37.706168943Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13579 {"msg":"[1] downstairs client at 127.0.0.1:64794 has UUID 51f6c6d4-74e6-40e9-8073-39006e8120ae","v":0,"name":"crucible",""level":30,"",,"level""time:level""30:[0] 127.0.0.1:43936 task reports connection:true:""30,"v2023-09-22T23:21:37.7061952Z"":0,,""name"hostname:"":,"crucible"time"":","ip-10-150-1-55.us-west-2.compute.internal"level",:"30pid"2023-09-22T23:21:37.70621476Z":,4769"hostname"}:
13580 ",,""timetime""::"ip-10-150-1-55.us-west-2.compute.internal""2023-09-22T23:21:37.706223325Z",{",2023-09-22T23:21:37.706222303Z""time""msg:,,hostname"""hostname"":""pid2023-09-22T23:21:37.706232308Z:""ip-10-150-1-55.us-west-2.compute.internal:"","",ip-10-150-1-55.us-west-2.compute.internal""pid,":"pid:"47694769:[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a4638f9d-a209-4281-803d-046dfafa8f18, encrypted: true, database_read_version: 1, database_write_version: 1 }"hostname,"":}v"}"
13581 4769
13582 }{
13583 "ip-10-150-1-55.us-west-2.compute.internalmsg":{{"0,,""":namepid""::{Sep 22 23:21:37.706 INFO Current flush_numbers [0..12]: [0, 0]
13584 "msgSep 22 23:21:37.706 INFO [2] Starts cmd_loop
13585 ":4769}"
13586 [0] downstairs client at 127.0.0.1:63497 has UUID f652e01a-1c15-4c9f-9d6f-06b9109bda2e","v"{:0",""cruciblename""msg"::",""crucible"level":69c4c5e1-2fd5-4777-bf6a-fcd75618f55d WaitQuorum WaitQuorum WaitQuorum"30,","level"v"::030,"name":"crucible",""level":30Set check for repair","v":0,"name":"crucible"msg,,"""level"time::,"":30time""":2023-09-22T23:21:37.706400458Z"",","2023-09-22T23:21:37.706410052Z"hostname",:""time"hostname":"ip-10-150-1-55.us-west-2.compute.internal:"","2023-09-22T23:21:37.70640568Zpid"ip-10-150-1-55.us-west-2.compute.internal:"4769"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 51f6c6d4-74e6-40e9-8073-39006e8120ae, encrypted: true, database_read_version: 1, database_write_version: 1 },},"
13587 pid"""hostname,{:",4769"":"}msg
13588 "v""The guest has finished waiting for activation
13589 :time{":":msg""0:"",ip-10-150-1-55.us-west-2.compute.internal[0]R flush_numbers: [0, 0]8f69534d-528b-4c23-b2c9-ce2dfe832ae1 WaitActive WaitActive WaitActive""""2023-09-22T23:21:37.706425301Z",,"name",v""v:"0:,0,,":""name"Sep 22 23:21:37.706 INFO Downstairs has completed Negotiation, task: proc
13590 pidhostname":""crucible:"4769""name:"":},"cruciblecrucible""ip-10-150-1-55.us-west-2.compute.internal,"level",
13591 "":,""levellevel""::3030{30pid":4769}"
13592 msg":",,""timetime""::""{2023-09-22T23:21:37.706518303Z2023-09-22T23:21:37.706518172Z"",,""hostname"hostname""::"msg[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f652e01a-1c15-4c9f-9d6f-06b9109bda2e, encrypted: true, database_read_version: 1, database_write_version: 1 }"":ip-10-150-1-55.us-west-2.compute.internal""",,"ip-10-150-1-55.us-west-2.compute.internal,[1] 127.0.0.1:55221 task reports connection:true"time""",pid""pid:"4769:v}","
13593 4769"}v:{
13594 ""msg"::"2023-09-22T23:21:37.706520126Z0:"{",0","[1] downstairs client at 127.0.0.1:59903 has UUID f9162fd6-864f-4c42-be11-73fe673d33ac"msg,name"name""":::""",crucible""[0]R generation: [0, 0]""crucible"v,"":,hostname"0v,"":"0name,"":name""level:crucible":,:""30"crucible,"",""levellevel""::3030level"ip-10-150-1-55.us-west-2.compute.internal":,"30pid":4769}
13595 ,,""timetime""::"","2023-09-22T23:21:37.706610814Z2023-09-22T23:21:37.706610705Z"time"{,"":,hostname"":hostname""":"ip-10-150-1-55.us-west-2.compute.internal"msg",2023-09-22T23:21:37.706604995Zip-10-150-1-55.us-west-2.compute.internal""",,""pid,:"pid:"4769""}"time
13596 :hostname"{":"4769msg"82efa4d5-f84d-4765-a042-3cbb8c544041 WaitActive WaitActive WaitActive}:""
13597 :,ip-10-150-1-55.us-west-2.compute.internal"{","[0]R dirty: [false, false]""pid"",""msgv"":"v":::2023-09-22T23:21:37.706614481Z04769,""0}name",[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f9162fd6-864f-4c42-be11-73fe673d33ac, encrypted: true, database_read_version: 1, database_write_version: 1 }""
13598 :name,""v:"":"{0crucible",,""name",level""::"30"cruciblemsghostname"":crucible"":,""ip-10-150-1-55.us-west-2.compute.internal"",level",time"":,1c48f237-34b8-4484-b38b-7c6b80300cc8 WaitQuorum WaitQuorum WaitActive"30:Sep 22 23:21:37.706 INFO Current flush_numbers [0..12]: [0, 0]
13599 "pid"",:level4769"",v""2023-09-22T23:21:37.706700295Z"time"},
13600 :""0:hostname30,:"":""name"2023-09-22T23:21:37.706716406Z:"ip-10-150-1-55.us-west-2.compute.internal,"","hostname""pid:"":crucible"4769,"ip-10-150-1-55.us-west-2.compute.internal}level""
13601 ,:"30{pid":4769"}
13602 msg,"{":""msg"time":"[1]R flush_numbers: [0, 0]":","v":2023-09-22T23:21:37.706742126Z"8f69534d-528b-4c23-b2c9-ce2dfe832ae1 WaitActive WaitActive WaitActive0",",,name""v:"":"crucible0"hostname,,,""""namelevel"":":time"crucible::30"",""2023-09-22T23:21:37.706758366Z"level":ip-10-150-1-55.us-west-2.compute.internal,"",30hostname"":"pid":4769,"ip-10-150-1-55.us-west-2.compute.internal"time,""}:pid"
13603 ,"":The guest has finished waiting for activation
13604 47692023-09-22T23:21:37.706784368Ztime""{}:,""
13605 2023-09-22T23:21:37.706790859Zhostname""",:""msg"hostname{"ip-10-150-1-55.us-west-2.compute.internal:"",:"""msg"pidip-10-150-1-55.us-west-2.compute.internal"":,:4769"}pid
13606 "":[2] downstairs client at 127.0.0.1:43168 has UUID 61a97a70-da7b-4cb5-91fa-9da487068e7c{4769"Waiting for 1 more clients to be ready"},"
13607 msg"":{",v"[1]R generation: [0, 0]""",msg""v:""v"::0:,0[2] downstairs client at 127.0.0.1:36502 has UUID 88162c01-9da1-4e8d-8218-aee73fa16e70,""0name,"":v""",namecrucible:"0",,"""level"name:name"30":::""crucible"",cruciblecrucible""",level"":,30,level""":timelevel30"":":302023-09-22T23:21:37.70686965Z",",hostname"":time"":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:37.706879606Z"",","pid":hostname"4769:"}
13608 ip-10-150-1-55.us-west-2.compute.internal","pid":4769{}
13609 "msg":",{[1]R dirty: [false, false]",,""msg"v:"":""0timetime",:""name"":2023-09-22T23:21:37.706884418Z":[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 88162c01-9da1-4e8d-8218-aee73fa16e70, encrypted: true, database_read_version: 1, database_write_version: 1 }"",,"crucible""v",:"0level,"":2023-09-22T23:21:37.706886626Z"30name"hostname":"":Sep 22 23:21:37.706 INFO Downstairs has completed Negotiation, task: proc
13610 ,crucible""time,,""level:"":""2023-09-22T23:21:37.706925189Z30"hostname":ip-10-150-1-55.us-west-2.compute.internal",",""hostname"pid":ip-10-150-1-55.us-west-2.compute.internal"":,,ip-10-150-1-55.us-west-2.compute.internal""time"4769,"":pid"pid""}:2023-09-22T23:21:37.706945185Z:"4769
13611 4769},"
13612 hostname":}{"{
13613 "ip-10-150-1-55.us-west-2.compute.internal""msg,"":{"pid"msg"[2]R flush_numbers: [0, 0]::"4769",}"
13614 "Set check for repair"{v"msg":msg0",:"","name"v"":8f69534d-528b-4c23-b2c9-ce2dfe832ae1 WaitActive WaitActive WaitActive"":,crucible:"",v""0level:"0:,30",""name"name"::""crucible"crucible,"","level"level",:"30:[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 61a97a70-da7b-4cb5-91fa-9da487068e7c, encrypted: true, database_read_version: 1, database_write_version: 1 }30time"":","v":02023-09-22T23:21:37.707007957Z",","name"hostname":,:"""time"crucible:ip-10-150-1-55.us-west-2.compute.internal""","2023-09-22T23:21:37.707018347Z"pid",:"4769,hostname}"
13615 :,""{"timeip-10-150-1-55.us-west-2.compute.internal""level",msg""pid:""::4769[2]R generation: [0, 0]"}"":
13616 ,2023-09-22T23:21:37.707020307Z""30,"v"hostname"::0","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal",","levelpid""::476930,"time"}:
13617 "2023-09-22T23:21:37.707054156Z","hostname":"{,"time":"ip-10-150-1-55.us-west-2.compute.internal",""pid"msg":2023-09-22T23:21:37.707064806Z"":,"4769hostname":}"
13618 [2] 1c48f237-34b8-4484-b38b-7c6b80300cc8 (126b8002-1d8d-46c3-951e-15087f60a7c9) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum"ip-10-150-1-55.us-west-2.compute.internal",,{""pidv""":msg"0::",4769"name"}:
13619 82efa4d5-f84d-4765-a042-3cbb8c544041 WaitActive WaitActive WaitActive"","crucible{"v":,0","msglevel""":name""::"[2]R dirty: [false, false]"30crucible",","v"level"::030,"name":"crucible","level":30}
13620 {"msg":",",time",[0] 127.0.0.1:36647 task reports connection:truetime"""":,time"":":v""":2023-09-22T23:21:37.707127105Z0"2023-09-22T23:21:37.707118809Z2023-09-22T23:21:37.707123574Z"",,"",,hostnamename""::"""hostname"hostname":"ip-10-150-1-55.us-west-2.compute.internal"crucible",,"":levelpid""::304769""}
13621 ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"{,""pid"pidmsg"""::47694769:,"}}"Max found gen is 1time""
13622 ,:
13623 ""{v"2023-09-22T23:21:37.707163305Z:"0,,""name"hostname:"{""msg"crucible::"""","msg"level[2] Transition from WaitActive to WaitQuorumip-10-150-1-55.us-west-2.compute.internal"":"30,:,""pid"v""::04769,"}name
13624 ,"[0] 82efa4d5-f84d-4765-a042-3cbb8c544041 (13fb2768-edc0-4158-a955-c73279bce790) WaitActive WaitActive WaitActive ds_transition to WaitQuorum""{time:,""msg""::""crucible""2023-09-22T23:21:37.707205363Z,v"8e88d9c1-84f3-4981-affe-0c1310d04226 WaitQuorum WaitQuorum WaitQuorum"""level,,"""hostnamev":::""0:30,ip-10-150-1-55.us-west-2.compute.internal0",,"""pidname"nameThe guest has finished waiting for activation
13625 :",:4769""}time"
13626 :crucible"""{msg",:""msg"":level""2023-09-22T23:21:37.707236443Z":7be22e7c-d55a-4790-a835-081c096012f4 WaitActive WaitActive WaitActive"Generation requested: 1 >= found:1",30,,""vv"":"0:,0"hostnamename,"":"name"":crucible:"""crucible,"","level"level:":ip-10-150-1-55.us-west-2.compute.internal3030",,""time"pid:"":4769}2023-09-22T23:21:37.707271534Z"
13627 ,",,"{hostname"time""":time""::""2023-09-22T23:21:37.70728467Zmsg"":,2023-09-22T23:21:37.707284991Z"""hostname,"":hostname""ip-10-150-1-55.us-west-2.compute.internal:"ip-10-150-1-55.us-west-2.compute.internal"",[2] new RM replaced this: None""ip-10-150-1-55.us-west-2.compute.internal,",,""pid":pid4769"":}4769
13628 }v"
13629 {:0,"{"name""msgmsg":"":":Next flush: 1""crucible,""[2] downstairs client at 127.0.0.1:34655 has UUID 10863a73-c1c6-434a-bb7b-15af38914b1bv"":,0",v"",name""::0",level""cruciblename"",:"":levelcrucible"":,pid"3040level""::476930}
13630 ,"time"{:","2023-09-22T23:21:37.707351675Ztime"",",":"time""hostname"::"2023-09-22T23:21:37.707355579Z""msg",":2023-09-22T23:21:37.707352447Z"hostnameip-10-150-1-55.us-west-2.compute.internal"",:,""""hostname"ip-10-150-1-55.us-west-2.compute.internalpid"",:"4769pid:}[0] Transition from WaitActive to WaitQuorum""
13631 "ip-10-150-1-55.us-west-2.compute.internal",{:,4769""msg}"
13632 :pid""{:"4769"All extents matchmsg"",:""vv""}::0
13633 ,0"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 10863a73-c1c6-434a-bb7b-15af38914b1b, encrypted: true, database_read_version: 1, database_write_version: 1 }name"",:,"{"cruciblev"",:"0",levelmsg"""":name30":":name""[2] Starts reconcile loop":crucible","crucible,""v"level"",:,:""time30"level:0"",:"302023-09-22T23:21:37.707423619Zname"":","crucible"hostname,"":time"",:"ip-10-150-1-55.us-west-2.compute.internal""level",2023-09-22T23:21:37.707437451Z""pid","::hostname30"4769:"}
13634 ip-10-150-1-55.us-west-2.compute.internal",,"{"time"pid"":":msg"4769:"}2023-09-22T23:21:37.707442624Z
13635 No downstairs repair required"",{,,"""msg"time"":v""hostname:"::7be22e7c-d55a-4790-a835-081c096012f4 WaitActive WaitActive WaitActive0"",""name2023-09-22T23:21:37.707455364Z"",:""vip-10-150-1-55.us-west-2.compute.internal",crucible":0",",hostname",""pidnamelevel""::"30"crucible"":":,4769"ip-10-150-1-55.us-west-2.compute.internal"},
13636 ,level""":time30"{:pid"":"4769msg2023-09-22T23:21:37.707495149Z""},
13637 :,"""hostnametime""::""{[0] new RM replaced this: None","2023-09-22T23:21:37.70750698Zip-10-150-1-55.us-west-2.compute.internal""",,"msg""hostnamepid""::"4769:v}ip-10-150-1-55.us-west-2.compute.internal""
13638 ,"":{pid"0:"4769[2] 127.0.0.1:59938 task reports connection:true"msg}"
13639 :,"{",v"No initial repair work was required"msg",:""v""name":"0::,""name[0] 7be22e7c-d55a-4790-a835-081c096012f4 (8fe9d3ab-892b-48e5-a656-2b4564d918d0) WaitActive WaitActive WaitActive ds_transition to WaitQuorum0",crucible","""vname":"0:,,""name:"":"level"crucible""crucible",crucible",level"",":"30level:"level:"30:4030,"time":","time"2023-09-22T23:21:37.707575943Z:"","hostname,"2023-09-22T23:21:37.707579173Z:""","time":ip-10-150-1-55.us-west-2.compute.internalhostname"",:""pid",:ip-10-150-1-55.us-west-2.compute.internal4769""},
13640 ""pid"time"::{4769"2023-09-22T23:21:37.707579909Z""}
13641 msg,"{2023-09-22T23:21:37.707580708Z"""hostname",:msg"""::""hostname"Set Downstairs and Upstairs active":,[0] Transition from WaitActive to WaitQuorum""ip-10-150-1-55.us-west-2.compute.internal"v"",ip-10-150-1-55.us-west-2.compute.internal"",,:"0v":"pid",0"pid"name,:""4769::name"4769crucible"":,""}levelcrucible"":,30"}
13642 level"
13643 {,:"30time":""msg"{2023-09-22T23:21:37.707643947Z:"",""hostnamemsg""[0] Starts reconcile loop:"",:"",time""1c48f237-34b8-4484-b38b-7c6b80300cc8 WaitQuorum WaitQuorum WaitQuorum:ip-10-150-1-55.us-west-2.compute.internal"""v2023-09-22T23:21:37.707654244Z,"",""v"pid,:"0"hostname":,:4769:0}"
13644 ip-10-150-1-55.us-west-2.compute.internal","{name""",msg""pid:""name"::4769:""69c4c5e1-2fd5-4777-bf6a-fcd75618f55d is now active with session: 412fdf38-1b7f-416c-b371-8bd0b7f86f94crucible""crucible,,}""v
13645 "level:"",0{:"30",msg"":name""level"::"[0] new RM replaced this: None"crucible30,"",v"":level0":,"30name":"crucible","level":40,"time":",",time2023-09-22T23:21:37.707708237Z""",:"",time"time":2023-09-22T23:21:37.707716295Z""hostname",2023-09-22T23:21:37.707719192Z"":,hostname"""hostname:"""::ip-10-150-1-55.us-west-2.compute.internal""ip-10-150-1-55.us-west-2.compute.internal,"","ip-10-150-1-55.us-west-2.compute.internal"",pid""pid:"4769:pid2023-09-22T23:21:37.707713979Z"}4769
13646 }"
13647 {,":hostname{"4769msg"""}:msg"::""
13648 [0] Starts reconcile loop69c4c5e1-2fd5-4777-bf6a-fcd75618f55d Set Active after no repair""",,""vv""::00,ip-10-150-1-55.us-west-2.compute.internal,""name"name:"{,"""pid"crucible::"4769""},crucible
13649 ""msg":"{,level""[0]R flush_numbers: [0, 0]:level30"""msg":,30":v"":0,"name":"crucible",",level""time"::,""30[1] 82efa4d5-f84d-4765-a042-3cbb8c544041 (13fb2768-edc0-4158-a955-c73279bce790) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum"2023-09-22T23:21:37.707798357Ztime"",:,"""v"2023-09-22T23:21:37.707802482Zhostname"":,:""0hostname",:ip-10-150-1-55.us-west-2.compute.internal""","nameip-10-150-1-55.us-west-2.compute.internalpid"",:""pid4769"::},"
13650 4769"crucible"}{,
13651 "time"{msg":":""msg""2023-09-22T23:21:37.707813553Z"level":,:""30hostname"[1] 7be22e7c-d55a-4790-a835-081c096012f4 (8fe9d3ab-892b-48e5-a656-2b4564d918d0) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorumNotify all downstairs, region set compare is done."":,,""v"v:"0:,0","name"":name"":"crucible"ip-10-150-1-55.us-west-2.compute.internal",crucible"",,level""level:"30:"30pid,"":time"4769:"}
13652 ,,""2023-09-22T23:21:37.707850245Z"timetime""::""{,"2023-09-22T23:21:37.707864405Zhostname""2023-09-22T23:21:37.707865715Z"",,""msg"hostname"::hostname""":":ip-10-150-1-55.us-west-2.compute.internal""ip-10-150-1-55.us-west-2.compute.internal,"ip-10-150-1-55.us-west-2.compute.internal",""pidpid""::47694769,[0]R generation: [0, 0]"}}
13653 
13654 {,"{"pid"msgmsg":"":v"""4769:Set check for repair""},:
13655 0[1] Transition from WaitActive to WaitQuorum""v",,":v{""0:name"0,,:""name""name""::""cruciblemsg"cruciblecrucible"",,"""level:The guest has finished waiting for activation
13656 "level:",:30"30"level":[1] Transition from WaitActive to WaitQuorum30","v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:21:37.707949009Z2023-09-22T23:21:37.707947973Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47694769}}
13657 
13658 {,,""{msg""time":msg"""::time"""[1] 127.0.0.1:39320 task reports connection:true[1] new RM replaced this: None"":,",""vv":2023-09-22T23:21:37.707951902Z2023-09-22T23:21:37.707958074Z""0",:"0,,name""name:"":"crucible",hostname"crucible:""",,"""levellevelhostname"""::40:ip-10-150-1-55.us-west-2.compute.internal"",30"pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"pid":}4769
13659 ,,""}timetime""::""{
13660 2023-09-22T23:21:37.708009295Z2023-09-22T23:21:37.708006646Z"""{,,""hostname"hostname:"":msg"":ip-10-150-1-55.us-west-2.compute.internal""msg",ip-10-150-1-55.us-west-2.compute.internal""pid,"""pid:"4769:}:"4769
13661 }[1] new RM replaced this: None
13662 {[0]R dirty: [false, false]""{,"""msgmsg""::v",""":v"[1] Starts reconcile loop0"69c4c5e1-2fd5-4777-bf6a-fcd75618f55d Active Active Active,",,":v""v:"0:,0"0name",,""name""name"::"namecrucible"":,""level"crucible:"30,":"crucible""level",crucible"":,30level"":,40"level"time"::"302023-09-22T23:21:37.70807867Z","hostname,"":"time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:21:37.708086492Z",","pid,hostname""::"4769"}time"ip-10-150-1-55.us-west-2.compute.internal
13663 ":","{pid":2023-09-22T23:21:37.708088881Z""4769msg,,"}"
13664 :time"{":"""msg":"hostname2023-09-22T23:21:37.708092577Z""Set check for repair:",,"[2] 7be22e7c-d55a-4790-a835-081c096012f4 (8fe9d3ab-892b-48e5-a656-2b4564d918d0) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum"""hostname"v,"":v0"ip-10-150-1-55.us-west-2.compute.internal":,0",:name"","name:"":ip-10-150-1-55.us-west-2.compute.internal"crucible"",""pid,crucible""level,pid""""level:"30:::3047694769}
13665 }
13666 ,,""{timetime""::"""{msg"2023-09-22T23:21:37.708149339Z2023-09-22T23:21:37.708147738Z"":,,""""hostnamehostname"":[1] Starts reconcile loop:"""msg",:"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,"v"pid"":"pid:"4769:[1]R flush_numbers: [0, 0]"}4769
13667 },0"{
13668 "vmsg",:{""":name""[2] Transition from WaitActive to WaitQuorummsg"":",0",v[2] 127.0.0.1:57826 task reports connection:true"":":name"0,,""v:"name""::0",""cruciblename"":crucible","",level"crucible"crucible:"30",,level""level"":level:30"30:30,"time":"2023-09-22T23:21:37.708225132Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4769"time"}:
13669 ","2023-09-22T23:21:37.708233486Z{"time","":"hostnamemsg""::"",2023-09-22T23:21:37.708232977Z""[2] new RM replaced this: Noneip-10-150-1-55.us-west-2.compute.internal"",,,""pidv""::0time",4769""name}hostname
13670 "::"""{:2023-09-22T23:21:37.70823208Z""cruciblemsg","",:"""levelip-10-150-1-55.us-west-2.compute.internalhostname"69c4c5e1-2fd5-4777-bf6a-fcd75618f55d Active Active Active":"40,"":,"v"":pid0ip-10-150-1-55.us-west-2.compute.internal","",name:","":time""pid"crucible:4769"":}4769
13671 },2023-09-22T23:21:37.708280233Z""level{
13672 ,"":"30hostname"msg"::"{"ip-10-150-1-55.us-west-2.compute.internal","pid"":msg",4769":}time
13673 ""[2] 82efa4d5-f84d-4765-a042-3cbb8c544041 (13fb2768-edc0-4158-a955-c73279bce790) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum:{[1]R generation: [0, 0]"""",2023-09-22T23:21:37.70831015Zmsg"",",:""v"hostname[2] Starts reconcile loop""::,"0","vip-10-150-1-55.us-west-2.compute.internal"":,0",pid"":name4769"name"}:
13674 ":"crucible{"crucible,""msg":level""":Set check for repair30",,""level"v"v"::030,"","name:time":"":0crucible"",,"2023-09-22T23:21:37.708351496Z""levelname","":hostname:"30:""crucible","ip-10-150-1-55.us-west-2.compute.internal"level",":pid"30:,4769"}time"
13675 ,":time"":{"2023-09-22T23:21:37.708359596Z""2023-09-22T23:21:37.708370612Z"msg":,"","hostname"hostname":[0] 127.0.0.1:54884 task reports connection:true"":","vip-10-150-1-55.us-west-2.compute.internal"":,0",ip-10-150-1-55.us-west-2.compute.internal",pid","name:""4769:"pid""}:
13676 crucible"4769time"{},
13677 :"""msglevel""::"302023-09-22T23:21:37.70837662Z"{[0] received reconcile message",,""hostname"":msg"":"v":0[1]R dirty: [false, false]ip-10-150-1-55.us-west-2.compute.internal,,"""nametime,"":"":","2023-09-22T23:21:37.708413934Z"crucible"v,,""""levelhostname""::30"pid"::0ip-10-150-1-55.us-west-2.compute.internal",4769,"}"
13678 ,pid""name":time"4769::{}"
13679 2023-09-22T23:21:37.70843782Z"",{"msg"hostname"":":msg""":crucible"ip-10-150-1-55.us-west-2.compute.internal""[2] Transition from WaitActive to WaitQuorum,",","pid7be22e7c-d55a-4790-a835-081c096012f4 WaitQuorum WaitQuorum WaitQuorum"""vlevel:,""4769"v}:
13680 ":30{:00",,"msg""name:"":name":"[0] All repairs completed, exit""crucible"crucible,"",v,"":"0level,"":level"30name"::30"crucible",,""timelevel""::"30,"2023-09-22T23:21:37.708481212Z"time":","hostname":2023-09-22T23:21:37.708495279Z"",,""timeip-10-150-1-55.us-west-2.compute.internal",:"hostnametime":"":""",ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:37.708503597Z"",",""pid2023-09-22T23:21:37.708497252Z"hostname""::4769",}pid
13681 ip-10-150-1-55.us-west-2.compute.internal"",{":pid""msg""::4769"4769}hostname[0]R flush_numbers: [0, 0]
13682 "",{":}"vmsg""::0"
13683 [0] Starts cmd_loop,"""name,"":"v":crucible0",,""{levelname""::30"ip-10-150-1-55.us-west-2.compute.internal""crucible"msg,"",:""pidlevel"",::[2]R flush_numbers: [0, 0]""304769time",:}""v"
13684 2023-09-22T23:21:37.708559631Z,"":,time"":{"hostname0"2023-09-22T23:21:37.708571052Z":,"","hostname""ip-10-150-1-55.us-west-2.compute.internal:name"""msg,""ip-10-150-1-55.us-west-2.compute.internalpid"",:"4769pid"}::4769
13685 }:
13686 {""crucible""{[2] new RM replaced this: None,msg""msg:":""""[0]R generation: [0, 0][1] received reconcile message"level"",,""v",:":v0"v:,"0name","":name""30:crucible"":,crucible""0,level"","level:"30name"::30"crucible","level":40,",time"":time"":","2023-09-22T23:21:37.70863222Z"2023-09-22T23:21:37.708634261Z"time,,"""hostnamehostname""::"",:""time"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid"":::476947692023-09-22T23:21:37.708626752Z"}}
13687 
13688 {",{""2023-09-22T23:21:37.70863756Zmsg""msg:"":hostname""[0]R dirty: [false, false]"[1] All repairs completed, exit"":,,""vv""::00,,""name"name:"""crucible:"",crucibleip-10-150-1-55.us-west-2.compute.internal,""""level",:"30,"levelpid"hostname"::"4769":30}
13689 ,ip-10-150-1-55.us-west-2.compute.internal"","timepid"{,""::time"""4769msg":2023-09-22T23:21:37.708692661Z"":},2023-09-22T23:21:37.708698902Z"""
13690 hostname,"":[2]R generation: [0, 0]""hostname{",ip-10-150-1-55.us-west-2.compute.internal:"",""vip-10-150-1-55.us-west-2.compute.internal""pid,msg""""pid"::47694769::0}}
13691 
13692 {,"{""msg""[2] Starts reconcile loopname"":,":msg"""crucible"[1]R flush_numbers: [0, 0]:""v"[1] Starts cmd_loop:,,"""0level,v,"":"0v,":"0:name,"":name"30crucible"":,"""name"crucible:""level",:"30crucible"level,"":level"30:30,"time":"2023-09-22T23:21:37.708775538Z",,","time""hostname:time"",:2023-09-22T23:21:37.708779368Z"""time,"":"":ip-10-150-1-55.us-west-2.compute.internalhostname"",:""2023-09-22T23:21:37.70877022Z"pid","ip-10-150-1-55.us-west-2.compute.internal""2023-09-22T23:21:37.708780156Z,:"4769"hostname",:pid}""
13693 "hostname":{ip-10-150-1-55.us-west-2.compute.internal":,"""msg4769":pid"}"
13694 [1]R generation: [0, 0]":,{"4769ip-10-150-1-55.us-west-2.compute.internal""v",}"
13695 :msg0",:""pid"name{[2] received reconcile message""::,""crucible4769"v,"}":msg
13696 0","level""name:"30:":{crucible"","level"[2]R dirty: [false, false]":"msg"30,:""v",:"0[0] 127.0.0.1:39150 task reports connection:true",time":",name"""v:"2023-09-22T23:21:37.708855901Z,"":time,0"",hostname:"""":2023-09-22T23:21:37.708863446Z""crucible"name,ip-10-150-1-55.us-west-2.compute.internal,"","""hostnamelevel":"pid:":"4769:"30}ip-10-150-1-55.us-west-2.compute.internal
13697 "crucible,"{",pid"""msg"level"::"4769:30}[1]R dirty: [false, false]
13698 ","v"{:0,""name"msg:"":,crucible""","time"level[2] All repairs completed, exit""::,30""v":0,2023-09-22T23:21:37.708893953Z"",name",:"""crucible"time",hostname",""level"time:"30::"":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:21:37.708916807Z,""pid",2023-09-22T23:21:37.708904356Z:",4769"hostname"":,"time}""ip-10-150-1-55.us-west-2.compute.internalhostname":"",
13699 :"2023-09-22T23:21:37.70892917Z""pid",:"4769hostname"}:
13700 "{ip-10-150-1-55.us-west-2.compute.internal","{ip-10-150-1-55.us-west-2.compute.internal"pid,"""pidmsg"":"4769::msg"4769":}"}
13701 [2]R flush_numbers: [0, 0]"Max found gen is 1"{,,"""msgvv"
13702 :""::0","0{,name[2] Starts cmd_loop"""msg:"name",""::""crucible""v",:"0crucible"82efa4d5-f84d-4765-a042-3cbb8c544041 WaitQuorum WaitQuorum WaitQuorum",level",:,30""name"level""::v"30":0crucible",","namelevel,""":"time:"30:crucible"","level":2023-09-22T23:21:37.708999788Z30","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",:"4769time":"}
13703 2023-09-22T23:21:37.709012443Z",",{,time""""msg"hostname:""::[2]R generation: [0, 0]"""time"ip-10-150-1-55.us-west-2.compute.internal",2023-09-22T23:21:37.709005603Z",v"""::0pid",:"4769","}name
13704 "2023-09-22T23:21:37.709015964Zhostname":":"",crucible""ip-10-150-1-55.us-west-2.compute.internal,""hostnamelevel",":"30pid"::"4769}ip-10-150-1-55.us-west-2.compute.internal"
13705 ,"pid":4769,"}time"
13706 :{"{2023-09-22T23:21:37.709058874Z"",msg"""msg"hostname:""::"Generation requested: 1 >= found:1""[0]R flush_numbers: [0, 0]","ip-10-150-1-55.us-west-2.compute.internal",,v""":v"0:,0pid",:"4769name""}:
13707 "name"crucible":",{"crucible"level",":msg"30":"level":30[2]R dirty: [false, false]","v":0,"name":"crucible","level":30,",time"":"time":"2023-09-22T23:21:37.709112891Z"2023-09-22T23:21:37.709110892Z",",,""timehostname"hostname"":::"""ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:37.709119378Z"",",pid""ip-10-150-1-55.us-west-2.compute.internalhostname""::,""4769pid":}ip-10-150-1-55.us-west-2.compute.internal
13708 "4769,"pid":}4769{
13709 "}msg":"
13710 {[0]R generation: [0, 0]","v{"":"0msg,"msg:":"""Next flush: 1"Max found gen is 1name",:""",v"":v0crucible"",:"0name,,""":level"name":"crucible"30,":"level":crucible"30,"level":30,"time":"2023-09-22T23:21:37.709197976Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time,"",:""pidtime""::"2023-09-22T23:21:37.709202694Z"47692023-09-22T23:21:37.709201246Z",}"
13711 ,hostname"":"{hostname":""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"",msg"pid"pid"":::4769"4769}
13712 [0]R dirty: [false, false]}"
13713 {,"v":0","msg"{:name"":"crucible"","msg"Generation requested: 1 >= found:1":level"",":v"All extents match":300,",v"":name"0:","crucible"name",:""level"crucible:"30,",level"":time"30:"2023-09-22T23:21:37.70927582Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",pid"":time4769":"}
13714 2023-09-22T23:21:37.709286221Z","hostname{":"",msg"ip-10-150-1-55.us-west-2.compute.internal"time"":,:""pid"":2023-09-22T23:21:37.709290435Z"4769[1]R flush_numbers: [0, 0]",},
13715 ""v"hostname"{::"0","msgname"ip-10-150-1-55.us-west-2.compute.internal"",:""pid":Next flush: 1"4769,"v":}0
13716 ,"name":"crucible","level"{:30"msg":":No downstairs repair required"",crucible""v",:"0level,"",name""::Sep 22 23:21:37.709 INFO Current flush_numbers [0..12]: [0, 0]
13717 "crucible"time30,"":"level":302023-09-22T23:21:37.709343947Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13718 ,"time":"{2023-09-22T23:21:37.709364771Z",,""msg""time"hostname":::"""All extents match"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:37.709368092Z,"""v",,"":hostname":pid"0":,4769Sep 22 23:21:37.709 DEBG IO Write 1000 has deps []
13719 }
13720 ip-10-150-1-55.us-west-2.compute.internal"","name"pid"{::"4769crucible"",}"
13721 msg"level:"":{30[1]R generation: [0, 0]","v":"0msg":,""name":"No initial repair work was requiredcrucible"","level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.709429357Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4769"time":}"
13722 2023-09-22T23:21:37.709439796Z",","time"hostname":{":"2023-09-22T23:21:37.709445361Z""ip-10-150-1-55.us-west-2.compute.internal"msg",:"",hostname""No downstairs repair required:"pid"":,4769"v"}:ip-10-150-1-55.us-west-2.compute.internal
13723 "0,",pid"":name":"4769{crucible"},
13724 ""msg"level":":{30[1]R dirty: [false, false]","v":"0msg,"":name"":"crucible"Set Downstairs and Upstairs active",","levelv""::300,"name":"crucible","level":,30"time":"2023-09-22T23:21:37.709501163Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13725 ,"time,"":"time"{:2023-09-22T23:21:37.709513469Z"","2023-09-22T23:21:37.709518552Z"msg","":hostnameSep 22 23:21:37.709 INFO Downstairs has completed Negotiation, task: proc
13726 ":"hostname"":"ip-10-150-1-55.us-west-2.compute.internal","No initial repair work was requiredpid"ip-10-150-1-55.us-west-2.compute.internal"":,,4769""v"}:
13727 0pid",:"4769name{":"}crucible"
13728 ,""msglevel""::{30""msg":"1c48f237-34b8-4484-b38b-7c6b80300cc8 is now active with session: 126b8002-1d8d-46c3-951e-15087f60a7c9"[2]R flush_numbers: [0, 0]",",v":"0v",":name":"0crucible",","namelevel""::"30crucible",","time":"level":302023-09-22T23:21:37.709577578Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13729 ,"time":"2023-09-22T23:21:37.709593493Z"{,"hostname,"""msg"time":"::""ip-10-150-1-55.us-west-2.compute.internal","pid2023-09-22T23:21:37.709597985Z"Set Downstairs and Upstairs active"",":,hostname"4769v"":}"
13730 ip-10-150-1-55.us-west-2.compute.internal":,0","{pid"name"::4769""msg"}:crucible"
13731 ","level":30{1c48f237-34b8-4484-b38b-7c6b80300cc8 Set Active after no repair","v":"0msg,"":"name":"[2]R generation: [0, 0]crucible"",","level"v"::300,"name":"crucible","level",:"30time":"2023-09-22T23:21:37.709653098Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13732 ,"time":","2023-09-22T23:21:37.709665982Z{time"":",""hostname"2023-09-22T23:21:37.709671284Z"msg"::"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal7be22e7c-d55a-4790-a835-081c096012f4 is now active with session: 8fe9d3ab-892b-48e5-a656-2b4564d918d0"4769",",}v"
13733 :"0pid",:"4769{name":"}crucible"
13734 ","msg"level:"":{30Notify all downstairs, region set compare is done.",""v":msg0",:""name":"crucible"[2]R dirty: [false, false]",",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.709725366Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13735 ,,""time"time"::{""2023-09-22T23:21:37.709736835Z"2023-09-22T23:21:37.70974142Z"",msg":,"""hostnamehostname""::"7be22e7c-d55a-4790-a835-081c096012f4 Set Active after no repair"",ip-10-150-1-55.us-west-2.compute.internal""ip-10-150-1-55.us-west-2.compute.internalv","":pid,"":0pid",4769:"4769name"}:
13736 "}crucible"
13737 {,"level":30{"msg":"Set check for repair"",msg""v":":0Max found gen is 1","name",:""v"crucible":,"0level",:"30name":","crucible"time",:""level":302023-09-22T23:21:37.709802572Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13738 ,"time":"The guest has finished waiting for activation
13739 ,2023-09-22T23:21:37.709819717Z""{time",:"""hostname"msg2023-09-22T23:21:37.709825352Z"":,":""hostname"ip-10-150-1-55.us-west-2.compute.internal":","Notify all downstairs, region set compare is done.pid""ip-10-150-1-55.us-west-2.compute.internal",:,4769""vpid}"
13740 "::04769,"{name":}"
13741 crucible"","msg"level"::"30{[0] received reconcile message","v":"0msg",:""name":"crucible"Generation requested: 1 >= found:1,""level",:"30v":0,"name":"crucible",",level"":time30":"2023-09-22T23:21:37.70988159Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13742 ,"time":"2023-09-22T23:21:37.709893374Z"{,,""hostname":""time"msg":ip-10-150-1-55.us-west-2.compute.internal":"","pidSet check for repair"2023-09-22T23:21:37.709899856Z"":,,""4769hostname"v":}":
13743 ip-10-150-1-55.us-west-2.compute.internal"0,",pid""{:name"4769:"crucible""},
13744 "msg"level:"":{30[0] All repairs completed, exit","v":"0msg,"":name"":"Next flush: 1"crucible",,""v"level"::030,"name":"crucible",","time"level:"":302023-09-22T23:21:37.709955701Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13745 ,"time":"2023-09-22T23:21:37.709968556Z"{,"hostname":",""time"ip-10-150-1-55.us-west-2.compute.internal":,""msg"pid"::"47692023-09-22T23:21:37.709974898Z"},
13746 [1] 127.0.0.1:38174 task reports connection:true"",hostname"":{v"":0,""ip-10-150-1-55.us-west-2.compute.internal"name",msg":"":pidcrucible"",":"4769[0] Starts cmd_loop"level":,30"}v"
13747 :0,"name":"crucible",{"level":30"msg":"All extents match","v":,"0time,"":name":""crucible",2023-09-22T23:21:37.710023519Z""level",:"30hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:21:37.710035123Z":4769,"hostname"}:
13748 "ip-10-150-1-55.us-west-2.compute.internal","{pid",:"4769"time"msg":}:
13749 ""2023-09-22T23:21:37.71005024Z"7be22e7c-d55a-4790-a835-081c096012f4 Active Active Active{",","hostnamev""::"0",msg"":"ip-10-150-1-55.us-west-2.compute.internal"name":,[1] received reconcile message""",crucible""pid,"v""::04769level",:}"
13750 name"30:"crucible","level{":30"msg":"No downstairs repair required","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:21:37.710108105Z"time":","hostname":2023-09-22T23:21:37.710111955Z"","hostname":ip-10-150-1-55.us-west-2.compute.internal"","pid":4769ip-10-150-1-55.us-west-2.compute.internal","}pid"
13751 :,4769"time":"}{
13752 2023-09-22T23:21:37.710122609Z""msg",:"{"hostname":"Set check for repair"","ip-10-150-1-55.us-west-2.compute.internal"vmsg,"""pid":::0",4769"name[1] All repairs completed, exit"}:
13753 ""crucible",,"{"v"level"::"30msg":"0,"name":"No initial repair work was required"crucible",,""v"level"::030,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.710181779Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"},time"
13754 ":"time":"2023-09-22T23:21:37.710190251Z"{2023-09-22T23:21:37.710194368Z",","hostname"":hostname"msg""::""ip-10-150-1-55.us-west-2.compute.internal","pid":[2] 127.0.0.1:34655 task reports connection:true4769"ip-10-150-1-55.us-west-2.compute.internal",},"
13755 v"":pid"0:,{"4769name":"}
13756 "crucible"msg",:"{level"":"30[1] Starts cmd_loop"msg":","v":0Set Downstairs and Upstairs active",","namev""::0","cruciblename"":","crucible"level",:"30level":30,"time":"2023-09-22T23:21:37.710250342Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"time"}:
13757 ,""time":"2023-09-22T23:21:37.710264802Z"{,"2023-09-22T23:21:37.710264162Z"hostname":",""hostnamemsg""ip-10-150-1-55.us-west-2.compute.internal"::"","pid":4769ip-10-150-1-55.us-west-2.compute.internal"}7be22e7c-d55a-4790-a835-081c096012f4 Active Active Active",
13758 ",pid""{v"::47690",}"msg"
13759 :name"":"crucible"{,"level":3082efa4d5-f84d-4765-a042-3cbb8c544041 is now active with session: 13fb2768-edc0-4158-a955-c73279bce790"",msg"":v"":0[2] received reconcile message","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.710324424Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13760 ,,""name"time":{:""crucible"",2023-09-22T23:21:37.710336283Zmsg"":","level""Set check for repair":hostname,""30v":":0,"nameip-10-150-1-55.us-west-2.compute.internal"",:""pid":crucible"4769,"level"}:
13761 30,"time":"{2023-09-22T23:21:37.710371105Z","hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal"[2] All repairs completed, exit",",pid"":v"4769:0},
13762 ,""name"time":{:""crucible","2023-09-22T23:21:37.710385989Z""msglevel"",:"30:"hostname":"82efa4d5-f84d-4765-a042-3cbb8c544041 Set Active after no repair"ip-10-150-1-55.us-west-2.compute.internal",,""v"pid"::04769,"name":"}
13763 crucible",",level"":time"30{:"2023-09-22T23:21:37.710415773Z""msg",:""hostname":"[0] received reconcile message",ip-10-150-1-55.us-west-2.compute.internal"",v"":pid"0:,4769"name":"}crucible"
13764 ,,""level":time"{30:"2023-09-22T23:21:37.710435769Z""msg",:""hostname":"[2] Starts cmd_loop","v":ip-10-150-1-55.us-west-2.compute.internal"0,","pid"name"::"4769crucible","}level"
13765 :,30"time":"{2023-09-22T23:21:37.710459405Z","hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal","Notify all downstairs, region set compare is done."pid":,4769"v":}0
13766 ,",name"":{"time"crucible":"","2023-09-22T23:21:37.710480894Zlevel"":msg",:""30hostname"[0] All repairs completed, exit":","v":0ip-10-150-1-55.us-west-2.compute.internal",","namepid""::"4769crucible","}level"
13767 :30,"time":"2023-09-22T23:21:37.710509326Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13768 ,"time":"2023-09-22T23:21:37.710524632Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","Set check for repair"pid":,4769"v":0},
13769 "name":"crucible","level"{:30"msg":"[0] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.710560637Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13770 ,"time":"{2023-09-22T23:21:37.710569854Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":[1] 127.0.0.1:64794 task reports connection:true"4769,"v":}0
13771 ,"The guest has finished waiting for activation
13772 {name"{:"crucible","level"":"30msg"msg":":"[1] received reconcile message","v":dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 WaitActive WaitActive WaitActive"0,"name,"":"v":crucible"0,","level"name":":crucible30",","time":level"":302023-09-22T23:21:37.710614978Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13773 ,"time":"2023-09-22T23:21:37.710630265Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal",,""82efa4d5-f84d-4765-a042-3cbb8c544041 Active Active Activetimepid""":,:4769""v"}:
13774 02023-09-22T23:21:37.710634904Z","name":,""{crucible"hostname,"""level"msg"::":30"ip-10-150-1-55.us-west-2.compute.internal",[1] All repairs completed, exit""pid":,"4769v":0,"}name"
13775 :"crucible","level":30,"time":"2023-09-22T23:21:37.710680316Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13776 ,"time":"{2023-09-22T23:21:37.710695167Z","hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal"Set check for repair",",pid"":v"4769:0,"}name":
13777 "crucible","level":30{"msg":"[1] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.710731624Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769},
13778 "time":"2023-09-22T23:21:37.710742347Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769[2] 127.0.0.1:43168 task reports connection:true",}"
13779 v":0,"name":"{crucible","level":30"msg":"[2] received reconcile message","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.710781732Z","hostname":""ip-10-150-1-55.us-west-2.compute.internal",":pid",:"4769"time"crucible:"}"
13780 ,2023-09-22T23:21:37.710790331Z"",level""{:hostname":"30"ip-10-150-1-55.us-west-2.compute.internal"msg,":""pid":476982efa4d5-f84d-4765-a042-3cbb8c544041 Active Active Active"}
13781 ,"v":0,"name":"{crucible","level":30"msg":"[2] All repairs completed, exit",,""time"v"::"0,"name"2023-09-22T23:21:37.71082574Z":"crucible",","hostname"level"::"30,"ip-10-150-1-55.us-west-2.compute.internal"time",:""pid":47692023-09-22T23:21:37.71084244Z"},"
13782 hostname":"ip-10-150-1-55.us-west-2.compute.internal",",{"pid"time:"4769:""msg"}:
13783 "2023-09-22T23:21:37.710857975Z"[0]R flush_numbers: [0, 0]","{,"hostname"v":msg"""::"0ip-10-150-1-55.us-west-2.compute.internal",,Set check for repair"""name,"pid":":crucible"4769",v"}"
13784 level:"0:,30{"name":"crucible"","msg"level"::"30[2] Starts cmd_loop","v":0,"name":"crucible",,""time"level:"":302023-09-22T23:21:37.710910967Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",",pid"":time"4769:"}
13785 2023-09-22T23:21:37.71091745Z","{hostname":,"""msg":time""ip-10-150-1-55.us-west-2.compute.internal:"",[0]R generation: [0, 0]""2023-09-22T23:21:37.710927771Z,pid"""v:"4769:,"0}hostname
13786 ",":name""{:"crucibleip-10-150-1-55.us-west-2.compute.internal""",msg,""level"pid""::"4769:30[0] received reconcile message}"
13787 ,"v":0,"name":"crucible",,""level"time"::30"2023-09-22T23:21:37.710976451Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13788 {"msg":","time[0]R dirty: [false, false]"",:"{"v":0,"2023-09-22T23:21:37.710990928Z"name":""crucible"msg,","level"::""30hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769[0] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 (4c909eb0-0138-456e-a20b-76c3a0a91bc8) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"}
13789 ,","timev""::{0","name":"2023-09-22T23:21:37.71102085Z"crucible"",msg",""hostname:"level""::"[0] All repairs completed, exit"30,"ip-10-150-1-55.us-west-2.compute.internal"v,""pid"::04769,"}name":
13790 "crucible","{level":30"msg":"[1]R flush_numbers: [0, 0]","v":0,"name":","crucible"time,"":"level":302023-09-22T23:21:37.711048273Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":,4769"time":"}
13791 ,2023-09-22T23:21:37.711061075Z""time",:""{hostname":"2023-09-22T23:21:37.711071294Z","hostname""msg"ip-10-150-1-55.us-west-2.compute.internal":",:""pid[0] Transition from WaitActive to WaitQuorum""ip-10-150-1-55.us-west-2.compute.internal:,""v,"":pid04769,"":name"4769}:
13792 }"
13793 crucible"{,"{level":30""msg":"msg":"[1]R generation: [0, 0]"[0] Starts cmd_loop","v",":v"0:,"0name",":name"":"crucible"crucible",","level"level:"30:,"30time":"2023-09-22T23:21:37.711120467Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"time}":"
13794 ,2023-09-22T23:21:37.711136988Z"","time"hostname"{Sep 22 23:21:37.711 DEBG up_ds_listen was notified
13795 :":""2023-09-22T23:21:37.71113849Z"msg":","ip-10-150-1-55.us-west-2.compute.internal"hostname"[0] new RM replaced this: None,:"""pid":4769,ip-10-150-1-55.us-west-2.compute.internal"},
13796 ""pidv"{:"0":msg"4769,:"}"
13797 Sep 22 23:21:37.711 DEBG up_ds_listen process 1000
13798 [1]R dirty: [false, false]"name{,"":v""":msg0",crucible":",name"":""level"crucible":,40"[1] received reconcile message"level":,30"v":0,"name":"crucible","level":30Sep 22 23:21:37.711 DEBG [A] ack job 1000:1, : downstairs
13799 ,"time":"2023-09-22T23:21:37.711215832Z",",hostname"":"time":","ip-10-150-1-55.us-west-2.compute.internal"time,""2023-09-22T23:21:37.711213701Z":"pid,"":2023-09-22T23:21:37.711221512Z4769hostname"}"
13800 ,:""{hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg",:""pid"ip-10-150-1-55.us-west-2.compute.internal"[2]R flush_numbers: [0, 0]:"4769,","pid}"v"
13801 ::04769,"name":"}{crucible
13802 ""msg",":"level"{:[0] Starts reconcile loop"30,"v"":Sep 22 23:21:37.711 DEBG up_ds_listen checked 1 jobs, back to waiting
13803 msg0":","name":"[1] All repairs completed, exit",crucible"",,time""":level""v"::302023-09-22T23:21:37.711278643Z"0,,""hostname"name"::""crucible","level"ip-10-150-1-55.us-west-2.compute.internal":,"30pid":4769}
13804 {,""msg":"time":"[2]R generation: [0, 0]",2023-09-22T23:21:37.711297423Z""v":,0","hostname":name"":,""ip-10-150-1-55.us-west-2.compute.internal"time":crucible",,""pid"level"::476930}
13805 ,"time":"2023-09-22T23:21:37.711333363Z","{hostname":""ip-10-150-1-55.us-west-2.compute.internal"","2023-09-22T23:21:37.711307152Z"pid"msg:,4769""}hostname":
13806 :""{ip-10-150-1-55.us-west-2.compute.internal"",msg"":pid"":[1] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 (4c909eb0-0138-456e-a20b-76c3a0a91bc8) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum"[2]R dirty: [false, false]4769,"",}"v"v"
13807 ::00,",name""{:name"":"crucible"",crucible"msg"":level"",:"[1] Starts cmd_loop"30level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.711394753Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13808 ,"time"{:"","msgtime""2023-09-22T23:21:37.711397693Z"::"","Max found gen is 1"hostname2023-09-22T23:21:37.711403109Z","":v"":,0",ip-10-150-1-55.us-west-2.compute.internalhostname"":name"":"","crucible"pid",ip-10-150-1-55.us-west-2.compute.internal"":level,"4769":pid"30:}4769
13809 }
13810 {,"time":"{"2023-09-22T23:21:37.711445059Z""msg",:"msg"hostname"":[1] Transition from WaitActive to WaitQuorum:""","[2] received reconcile messagevip-10-150-1-55.us-west-2.compute.internal""",,:""0pid,"":name":v""4769:crucible0",},""level"
13811 name"::30"{crucible",""levelmsg""::"30Generation requested: 1 >= found:1","v":0,"name":"crucible","level":30,",time":""time":"2023-09-22T23:21:37.71149051Z",2023-09-22T23:21:37.711496357Z""time",,:"""hostname"hostname"::"2023-09-22T23:21:37.711503023Z"",ip-10-150-1-55.us-west-2.compute.internal"",ip-10-150-1-55.us-west-2.compute.internalhostname""pid""::4769","pid"}ip-10-150-1-55.us-west-2.compute.internal
13812 ":,4769"pid"}:{4769
13813 }"
13814 msg"{:"{"[1] new RM replaced this: None""msgmsg",:"":"v""Next flush: 1":[2] All repairs completed, exit",0","vv,"":"name0":,:""name"crucible:0",",""name"cruciblelevel""::,40""level":crucible30","level":30,"time":"2023-09-22T23:21:37.711583353Z",","time"hostname"::,"""time":"ip-10-150-1-55.us-west-2.compute.internal"2023-09-22T23:21:37.711581428Z",2023-09-22T23:21:37.711585492Z",""pid"hostname",":"hostname:"4769ip-10-150-1-55.us-west-2.compute.internal":,}"
13815 ip-10-150-1-55.us-west-2.compute.internal"",pid"":{pid"4769:"4769}msg"
13816 }:
13817 {"All extents match","v"":{msg"0:,"""msg[1] Starts reconcile loop"name","":v":"":crucible"[2] Starts cmd_loop,""0,level""v","::300name":","crucible"name":,""cruciblelevel":"30,"level":30,"time":"2023-09-22T23:21:37.711660482Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13818 {,""time"msg",:"":"time"No downstairs repair required"2023-09-22T23:21:37.711667761Z:,""v"":,0",2023-09-22T23:21:37.71166939Z""hostname"name:","":"ip-10-150-1-55.us-west-2.compute.internalhostname""crucible",,""pid"::level"4769":30}ip-10-150-1-55.us-west-2.compute.internal"
13819 ,"pid":4769}
13820 ,{"time":""2023-09-22T23:21:37.711710776Z"msg":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13821 [2] 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 (4c909eb0-0138-456e-a20b-76c3a0a91bc8) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum"{,"v":"0msg",:""name":"crucibleNo initial repair work was required"",",v"":level"0:,"30name":"crucible","level":30,"time":"2023-09-22T23:21:37.711753832Z","hostname":","ip-10-150-1-55.us-west-2.compute.internaltime"":,""pid":47692023-09-22T23:21:37.711751098Z"}
13822 ,"hostname":"{"ip-10-150-1-55.us-west-2.compute.internalmsg"":,""pid":4769Set Downstairs and Upstairs active"},
13823 "v":0,"name":"crucible"{,"level":30"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:21:37.711795557Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13824 {"msg":","time":"2023-09-22T23:21:37.711807133Z"8e88d9c1-84f3-4981-affe-0c1310d04226 is now active with session: d738ea25-1afc-4d4c-9b80-99c2ada34ff7",","hostnamev""::0","name":"crucible"ip-10-150-1-55.us-west-2.compute.internal,"","level"pid"::304769}
13825 {,"time":""msg":2023-09-22T23:21:37.711837434Z"","hostname":"[2] new RM replaced this: None","v":ip-10-150-1-55.us-west-2.compute.internal0",,""namepid""::"4769crucible"},
13826 "level":40{"msg":"8e88d9c1-84f3-4981-affe-0c1310d04226 Set Active after no repair","v":0,"name":"crucible","level":30,"time,"":"time":"2023-09-22T23:21:37.711876839Z"2023-09-22T23:21:37.711866809Z",","hostname"hostname":":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",,""pidpid""::47694769}
13827 }
13828 {"msg":"{Notify all downstairs, region set compare is done.",""v"msg"::0","name":"[2] Starts reconcile loop"crucible","v":0,"name":",crucible"",level"":level30":30,"time":"2023-09-22T23:21:37.711933338Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid":2023-09-22T23:21:37.711934409Z"4769,"}
13829 hostname":"{ip-10-150-1-55.us-west-2.compute.internal",""pid"msg"::4769"}Set check for repair"
13830 ,"v":0,"name":"{crucible","level":30"msg":"[0] 127.0.0.1:34554 task reports connection:true","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.711976614Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13831 {"msg":,""time":"[1] 127.0.0.1:40751 task reports connection:true"2023-09-22T23:21:37.711985853Z","v",:"0,hostname""name"::""crucible","level":ip-10-150-1-55.us-west-2.compute.internal30","pid":4769}
13832 ,"{time":"2023-09-22T23:21:37.712014905Z"","msg"hostname"::""ip-10-150-1-55.us-west-2.compute.internal","8f69534d-528b-4c23-b2c9-ce2dfe832ae1 WaitQuorum WaitQuorum WaitQuorumpid"":4769,"v"}:
13833 0,"name":{"crucible"","msg"level"::"308e88d9c1-84f3-4981-affe-0c1310d04226 Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.712049847Z","time",:""hostname":"2023-09-22T23:21:37.712057162Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",":"pid":4769ip-10-150-1-55.us-west-2.compute.internal"},
13834 "pid":4769}
13835 {{""msg"msg"::""Set check for repair"[0]R flush_numbers: [0, 0]",",v"":v"0:,"0name",:""name":crucible"","crucible"level",:"30level":30,"time":"2023-09-22T23:21:37.712108278Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,}"
13836 time":"{2023-09-22T23:21:37.71210977Z"",msg"":"hostname":"[2] 127.0.0.1:42674 task reports connection:true"ip-10-150-1-55.us-west-2.compute.internal",",v"":pid"0:,4769"name":"}
13837 crucible","level":30{"msg":"[0]R generation: [0, 0]","v":0,"name":","crucible"time",:""level":302023-09-22T23:21:37.712150517Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13838 {"msg":"8e88d9c1-84f3-4981-affe-0c1310d04226 Active Active Active",,""v":time"0:,""name":"crucible"2023-09-22T23:21:37.71216455Z,""level":30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13839 ,"time":"2023-09-22T23:21:37.712189007Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid"[0]R dirty: [false, false]":4769,"}v"
13840 :0,"{name":"crucible""msg",:""level":30Set check for repair","v":0,"name":"crucible","level":30Sep 22 23:21:37.712 INFO current number of open files limit 65536 is already the maximum
13841 ,","time"time":":"2023-09-22T23:21:37.712226712Z"2023-09-22T23:21:37.712221296Z",",hostname":""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"":,"4769pid":}4769
13842 }
13843 {"msg":"{[0] received reconcile message",""v"msg"::0","name":"[1]R flush_numbers: [0, 0]"crucible",,""v"level"::030,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.712273948Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"}
13844 time":"{2023-09-22T23:21:37.712277937Z"","msg"hostname"::""[0] All repairs completed, exit"ip-10-150-1-55.us-west-2.compute.internal",",v"":pid"0:,"4769name":"}crucible"
13845 ,"level":30{"msg":"[1]R generation: [0, 0]","v":0,",name"":"time":crucible"","level":2023-09-22T23:21:37.712316923Z"30,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13846 {"msg":"[0] Starts cmd_loop","v":0,","name"time"::""crucible","2023-09-22T23:21:37.712332942Zlevel"":30,"hostname":Sep 22 23:21:37.712 INFO Created new region file "/tmp/downstairs-qImf5Xje/region.json"
13847 "ip-10-150-1-55.us-west-2.compute.internal","pid":,4769"time":}"
13848 2023-09-22T23:21:37.712354866Z","hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"":msg":4769"}
13849 [1]R dirty: [false, false]","v{":0","msg"name"::""crucible",[1] received reconcile message"","level"v"::300,"name":"crucible","level":30,"time":","time":2023-09-22T23:21:37.712399315Z"","hostname"2023-09-22T23:21:37.712395673Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769ip-10-150-1-55.us-west-2.compute.internal"},
13850 "pid":4769{}
13851 "msg":"[1] All repairs completed, exit"{,"v":0,""name"msg"::""crucible","[2]R flush_numbers: [0, 0]"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.712446684Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13852 ,"time":"{2023-09-22T23:21:37.712451625Z""msg":","hostname[1] Starts cmd_loop"":,""v":0,"ip-10-150-1-55.us-west-2.compute.internalname"":,""crucible"pid,"":level"4769:30}
13853 {,""time"msg"::""2023-09-22T23:21:37.712486074Z"[2]R generation: [0, 0]",",hostname""v":":0,"nameip-10-150-1-55.us-west-2.compute.internal"",:""crucible"pid",":level4769":}
13854 30Sep 22 23:21:37.712 DEBG Read :1000 deps:[] res:true
13855 {"msg":"[2] received reconcile message","v":0,"name":"crucible",",level":"30time":"2023-09-22T23:21:37.712517579Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13856 ,"time":"2023-09-22T23:21:37.71253251Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid[2]R dirty: [false, false]"":4769,"v"}:
13857 0,"name":"{crucible",""level"msg"::"30[2] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":","time"2023-09-22T23:21:37.712566154Z:"","hostname"2023-09-22T23:21:37.712572358Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"pid}"
13858 :4769}
13859 {{""msg"msg"::""Max found gen is 1"[2] Starts cmd_loop",",v""v"::00,","name"name:"":"crucible"crucible",,""level"level"::3030,"time":"2023-09-22T23:21:37.712623824Z","hostname":","ip-10-150-1-55.us-west-2.compute.internal"time":,""pid":47692023-09-22T23:21:37.71262379Z"}
13860 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
138612023-09-22T23:21:37.712ZINFOcrucible: Generation requested: 1 >= found:1
138622023-09-22T23:21:37.712ZINFOcrucible: Next flush: 1
138632023-09-22T23:21:37.712ZINFOcrucible: All extents match
138642023-09-22T23:21:37.712ZINFOcrucible: No downstairs repair required
138652023-09-22T23:21:37.712ZINFOcrucible: No initial repair work was required
138662023-09-22T23:21:37.712ZINFOcrucible: Set Downstairs and Upstairs active
13867 {"msg":"8f69534d-528b-4c23-b2c9-ce2dfe832ae1 is now active with session: 4c909eb0-0138-456e-a20b-76c3a0a91bc8","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.712904969Z"Sep 22 23:21:37.712 DEBG Read :1000 deps:[] res:true
13868 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
138692023-09-22T23:21:37.712ZINFOcrucible: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 Set Active after no repair
138702023-09-22T23:21:37.712ZINFOcrucible: Notify all downstairs, region set compare is done.
138712023-09-22T23:21:37.713ZINFOcrucible: Set check for repair
138722023-09-22T23:21:37.713ZINFOcrucible: [1] 127.0.0.1:59903 task reports connection:true
138732023-09-22T23:21:37.713ZINFOcrucible: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 Active Active Active
138742023-09-22T23:21:37.713ZINFOcrucible: Set check for repair
138752023-09-22T23:21:37.713ZINFOcrucible: [2] 127.0.0.1:36502 task reports connection:true
138762023-09-22T23:21:37.713ZINFOcrucible: 8f69534d-528b-4c23-b2c9-ce2dfe832ae1 Active Active Active
138772023-09-22T23:21:37.713ZINFOcrucible: Set check for repair
138782023-09-22T23:21:37.713ZINFOcrucible: [0] received reconcile message
13879 {"msg":"[0] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.713337138Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.713 DEBG Read :1000 deps:[] res:true
13880 }
138812023-09-22T23:21:37.713ZINFOcrucible: [0] Starts cmd_loop
138822023-09-22T23:21:37.713ZINFOcrucible: [1] received reconcile message
138832023-09-22T23:21:37.713ZINFOcrucible: [1] All repairs completed, exit
138842023-09-22T23:21:37.713ZINFOcrucible: [1] Starts cmd_loop
138852023-09-22T23:21:37.713ZINFOcrucible: [2] received reconcile message
138862023-09-22T23:21:37.713ZINFOcrucible: [2] All repairs completed, exit
138872023-09-22T23:21:37.713ZINFOcrucible: [2] Starts cmd_loop
138882023-09-22T23:21:37.713ZINFOcrucible: [1] downstairs client at 127.0.0.1:62341 has UUID edb26b00-76c7-4c82-b262-a6cf7181b4aa
13889 {"msg":"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: edb26b00-76c7-4c82-b262-a6cf7181b4aa, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"Sep 22 23:21:37.713 INFO Current flush_numbers [0..12]: [0, 0]
13890 crucible","level":30,"time":"2023-09-22T23:21:37.713741738Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
138912023-09-22T23:21:37.713ZINFOcrucible: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 WaitActive WaitActive WaitActive
138922023-09-22T23:21:37.713ZINFOcrucible: [2] downstairs client at 127.0.0.1:51713 has UUID e80e1f8b-7f5a-43fe-b856-a2bd94bd70c3
138932023-09-22T23:21:37.713ZINFOcrucible: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e80e1f8b-7f5a-43fe-b856-a2bd94bd70c3, encrypted: true, database_read_version: 1, database_write_version: 1 }
13894 {"msg":"dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.713910248Z"Sep 22 23:21:37.713 INFO Downstairs has completed Negotiation, task: proc
13895 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13896 Sep 22 23:21:37.714 INFO Current flush_numbers [0..12]: [0, 0]
13897 Sep 22 23:21:37.714 INFO Downstairs has completed Negotiation, task: proc
13898 Sep 22 23:21:37.714 INFO Current flush_numbers [0..12]: [0, 0]
13899 Sep 22 23:21:37.714 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13900 Sep 22 23:21:37.715 INFO Downstairs has completed Negotiation, task: proc
13901 Sep 22 23:21:37.715 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
139022023-09-22T23:21:37.715ZINFOcrucible: [0] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 (9165874c-5197-4919-861e-2430574d8700) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
13903 Sep 22 23:21:37.715 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13904 Sep 22 23:21:37.715 INFO current number of open files limit 65536 is already the maximum
13905 Sep 22 23:21:37.716 INFO Opened existing region file "/tmp/downstairs-qImf5Xje/region.json"
13906 Sep 22 23:21:37.716 INFO Database read version 1
13907 Sep 22 23:21:37.716 INFO Database write version 1
13908 Sep 22 23:21:37.716 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13909 Sep 22 23:21:37.716 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13910 Sep 22 23:21:37.717 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13911 Sep 22 23:21:37.717 DEBG Read :1001 deps:[JobId(1000)] res:true
13912 Sep 22 23:21:37.717 INFO UUID: de5d1b92-54ba-463b-93eb-b2678e77643b
13913 Sep 22 23:21:37.717 INFO Blocks per extent:5 Total Extents: 2
13914 Sep 22 23:21:37.717 INFO Crucible Version: Crucible Version: 0.0.1
13915 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13916 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13917 rustc: 1.70.0 stable x86_64-unknown-illumos
13918 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13919 Sep 22 23:21:37.717 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13920 Sep 22 23:21:37.717 INFO Using address: 127.0.0.1:47135, task: main
13921 Sep 22 23:21:37.717 DEBG Read :1001 deps:[JobId(1000)] res:true
13922 Sep 22 23:21:37.718 INFO Repair listens on 127.0.0.1:0, task: repair
13923 Sep 22 23:21:37.718 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52307, task: repair
13924 Sep 22 23:21:37.718 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52307, task: repair
13925 Sep 22 23:21:37.718 INFO listening, local_addr: 127.0.0.1:52307, task: repair
13926 Sep 22 23:21:37.718 DEBG Read :1001 deps:[JobId(1000)] res:true
13927 Sep 22 23:21:37.718 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52307, task: repair
13928 Sep 22 23:21:37.718 INFO Using repair address: 127.0.0.1:52307, task: main
13929 Sep 22 23:21:37.718 INFO No SSL acceptor configured, task: main
13930 Sep 22 23:21:37.718 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13931 Sep 22 23:21:37.718 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13932 Sep 22 23:21:37.718 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13933 Sep 22 23:21:37.719 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13934 Sep 22 23:21:37.719 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13935 Sep 22 23:21:37.719 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13936 Sep 22 23:21:37.719 DEBG Read :1003 deps:[JobId(1001)] res:true
13937 Sep 22 23:21:37.719 DEBG Read :1003 deps:[JobId(1001)] res:true
13938 Sep 22 23:21:37.720 DEBG Read :1003 deps:[JobId(1001)] res:true
139392023-09-22T23:21:37.720ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
139402023-09-22T23:21:37.720ZWARNcrucible: [0] new RM replaced this: None
139412023-09-22T23:21:37.720ZINFOcrucible: [0] Starts reconcile loop
139422023-09-22T23:21:37.720ZINFOcrucible: [1] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 (9165874c-5197-4919-861e-2430574d8700) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
139432023-09-22T23:21:37.720ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
13944 {"msg":Sep 22 23:21:37.720 INFO listening on 127.0.0.1:0, task: main
13945 "[1] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:21:37.720394909Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13946 {"msg":"[1] Starts reconcile loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.720436685Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.720 WARN f712941d-0577-43e4-8a2e-7814ec270c09 request to replace downstairs 127.0.0.1:46295 with 127.0.0.1:47135
13947 }
13948 {"msg":"[2] dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 (9165874c-5197-4919-861e-2430574d8700) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:21:37.720 INFO f712941d-0577-43e4-8a2e-7814ec270c09 found old target: 127.0.0.1:46295 at 0
13949 ,"time":"2023-09-22T23:21:37.720489466Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13950 Sep 22 23:21:37.720 INFO f712941d-0577-43e4-8a2e-7814ec270c09 replacing old: 127.0.0.1:46295 at 0
13951 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:21:37.720 INFO [0] client skip 1 in process jobs because fault, : downstairs
13952 ,"time":"2023-09-22T23:21:37.72053247Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13953 {"msg":"[2] new RM replaced this: None","v":0,"name":"crucible","level":40Sep 22 23:21:37.720 INFO [0] changed 1 jobs to fault skipped, : downstairs
13954 ,"time":"2023-09-22T23:21:37.720575984Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
13955 {"msg":"Sep 22 23:21:37.720 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) Active Active Active ds_transition to Replacing
13956 [2] Starts reconcile loop","v":0,"name":"crucible","level":30Sep 22 23:21:37.720 INFO [0] Transition from Active to Replacing
13957 ,"time":"2023-09-22T23:21:37.720621021Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
139582023-09-22T23:21:37.720ZINFOcrucible: [0] 127.0.0.1:63497 task reports connection:true
139592023-09-22T23:21:37.720ZINFOcrucible: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 WaitQuorum WaitQuorum WaitQuorum
139602023-09-22T23:21:37.720ZINFOcrucible: [0]R flush_numbers: [0, 0]
139612023-09-22T23:21:37.720ZINFOcrucible: [0]R generation: [0, 0]
139622023-09-22T23:21:37.720ZINFOcrucible: [0]R dirty: [false, false]
139632023-09-22T23:21:37.720ZINFOcrucible: [1]R flush_numbers: [0, 0]
139642023-09-22T23:21:37.720ZINFOcrucible: [1]R generation: [0, 0]
139652023-09-22T23:21:37.720ZINFOcrucible: [1]R dirty: [false, false]
13966 The guest has finished waiting for activation
139672023-09-22T23:21:37.720ZINFOcrucible: [2]R flush_numbers: [0, 0]
139682023-09-22T23:21:37.721ZINFOcrucible: [2]R generation: [0, 0]
139692023-09-22T23:21:37.721ZINFOcrucible: [2]R dirty: [false, false]
139702023-09-22T23:21:37.721ZINFOcrucible: Max found gen is 1
139712023-09-22T23:21:37.721ZINFOcrucible: Generation requested: 1 >= found:1
139722023-09-22T23:21:37.721ZINFOcrucible: Next flush: 1
139732023-09-22T23:21:37.721ZINFOcrucible: All extents match
139742023-09-22T23:21:37.721ZINFOcrucible: No downstairs repair required
139752023-09-22T23:21:37.721ZINFOcrucible: No initial repair work was required
139762023-09-22T23:21:37.721ZINFOcrucible: Set Downstairs and Upstairs active
139772023-09-22T23:21:37.721ZINFOcrucible: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 is now active with session: 9165874c-5197-4919-861e-2430574d8700
139782023-09-22T23:21:37.721ZINFOcrucible: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 Set Active after no repair
13979 {"msg":"Notify all downstairs, region set compare is done.","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.721 DEBG Write :1000 deps:[] res:true
13980 2023-09-22T23:21:37.721451796Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
139812023-09-22T23:21:37.721ZINFOcrucible: Set check for repair
139822023-09-22T23:21:37.721ZINFOcrucible: [1] 127.0.0.1:62341 task reports connection:true
139832023-09-22T23:21:37.721ZINFOcrucible: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 Active Active Active
139842023-09-22T23:21:37.721ZINFOcrucible: Set check for repair
139852023-09-22T23:21:37.721ZINFOcrucible: [2] 127.0.0.1:51713 task reports connection:true
139862023-09-22T23:21:37.721ZINFOcrucible: dbfe1e65-b9a0-4fe4-9ed9-1dc6d4d66db9 Active Active Active
139872023-09-22T23:21:37.721ZINFOcrucible: Set check for repair
139882023-09-22T23:21:37.721ZINFOcrucible: [0] received reconcile message
139892023-09-22T23:21:37.721ZINFOcrucible: [0] All repairs completed, exit
139902023-09-22T23:21:37.721ZINFOcrucible: [0] Starts cmd_loop
139912023-09-22T23:21:37.721ZINFOcrucible: [1] received reconcile message
139922023-09-22T23:21:37.721ZINFOcrucible: [1] All repairs completed, exit
139932023-09-22T23:21:37.721ZINFOcrucible: [1] Starts cmd_loop
139942023-09-22T23:21:37.721ZINFOcrucible: [2] received reconcile message
139952023-09-22T23:21:37.722ZINFOcrucible: [2] All repairs completed, exit
139962023-09-22T23:21:37.722ZINFOcrucible: [2] Starts cmd_loop
13997 Sep 22 23:21:37.722 DEBG Write :1000 deps:[] res:true
13998 Sep 22 23:21:37.722 DEBG Write :1001 deps:[JobId(1000)] res:true
13999 Sep 22 23:21:37.723 DEBG Write :1000 deps:[] res:true
14000 Sep 22 23:21:37.723 WARN [0] f712941d-0577-43e4-8a2e-7814ec270c09 WARNING finish job 1000 when downstairs state:Replacing
14001 Sep 22 23:21:37.723 WARN [0] Dropping already skipped job 1000, : downstairs
14002 Sep 22 23:21:37.723 WARN [0] will exit pm_task, this downstairs Replacing
14003 Sep 22 23:21:37.723 DEBG up_ds_listen was notified
14004 Sep 22 23:21:37.723 DEBG up_ds_listen checked 0 jobs, back to waiting
14005 Sep 22 23:21:37.724 ERRO 127.0.0.1:46295: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Replacing)), so we end too, looper: 0
14006 Sep 22 23:21:37.724 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 Gone missing, transition from Replacing to Replaced
14007 Sep 22 23:21:37.724 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 connection to 127.0.0.1:46295 closed, looper: 0
14008 Sep 22 23:21:37.724 INFO [0] 127.0.0.1:46295 task reports connection:false
14009 Sep 22 23:21:37.724 INFO f712941d-0577-43e4-8a2e-7814ec270c09 Replaced Active Active
14010 Sep 22 23:21:37.724 INFO [0] 127.0.0.1:46295 task reports offline
14011 Sep 22 23:21:37.724 WARN upstairs UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } disconnected, 0 jobs left, task: main
14012 Sep 22 23:21:37.724 WARN upstairs UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } was previously active, clearing, task: main
14013 Sep 22 23:21:37.724 INFO connection (127.0.0.1:40703): all done
14014 Sep 22 23:21:37.726 DEBG Write :1001 deps:[JobId(1000)] res:true
14015 test test::integration_test_guest_downstairs_unwritten_sparse_end ... ok
14016 test test::integration_test_guest_downstairs_unwritten_span ... ok
14017 Sep 22 23:21:37.728 DEBG Write :1001 deps:[JobId(1000)] res:true
14018 test test::integration_test_guest_downstairs_unwritten_sparse_1 ... ok
14019 Sep 22 23:21:37.728 INFO current number of open files limit 65536 is already the maximum
14020 Sep 22 23:21:37.728 INFO current number of open files limit 65536 is already the maximum
14021 Sep 22 23:21:37.728 INFO current number of open files limit 65536 is already the maximum
14022 test test::integration_test_guest_downstairs_unwritten_sparse_mid ... ok
14023 Sep 22 23:21:37.728 INFO Created new region file "/tmp/downstairs-vIWrHsDo/region.json"
14024 Sep 22 23:21:37.728 INFO Created new region file "/tmp/downstairs-GwPUeOkX/region.json"
14025 Sep 22 23:21:37.728 INFO Created new region file "/tmp/downstairs-JXdGfx2t/region.json"
14026 Sep 22 23:21:37.729 INFO current number of open files limit 65536 is already the maximum
14027 Sep 22 23:21:37.729 INFO Created new region file "/tmp/downstairs-7LycXkyy/region.json"
14028 Sep 22 23:21:37.729 DEBG Read :1002 deps:[JobId(1001)] res:true
14029 Sep 22 23:21:37.729 DEBG Read :1002 deps:[JobId(1001)] res:true
14030 Sep 22 23:21:37.730 DEBG Read :1002 deps:[JobId(1001)] res:true
14031 Sep 22 23:21:37.731 INFO current number of open files limit 65536 is already the maximum
14032 Sep 22 23:21:37.731 INFO Opened existing region file "/tmp/downstairs-GwPUeOkX/region.json"
14033 Sep 22 23:21:37.731 INFO Database read version 1
14034 Sep 22 23:21:37.731 INFO Database write version 1
14035 Sep 22 23:21:37.732 INFO current number of open files limit 65536 is already the maximum
14036 Sep 22 23:21:37.732 INFO Opened existing region file "/tmp/downstairs-vIWrHsDo/region.json"
14037 Sep 22 23:21:37.732 INFO Database read version 1
14038 Sep 22 23:21:37.732 INFO Database write version 1
14039 Sep 22 23:21:37.732 INFO current number of open files limit 65536 is already the maximum
14040 Sep 22 23:21:37.732 INFO Opened existing region file "/tmp/downstairs-JXdGfx2t/region.json"
14041 Sep 22 23:21:37.732 INFO Database read version 1
14042 Sep 22 23:21:37.732 INFO Database write version 1
14043 Sep 22 23:21:37.732 INFO UUID: 4a9c9bb9-ba03-459d-9045-08a986b4daa9
14044 Sep 22 23:21:37.732 INFO Blocks per extent:5 Total Extents: 2
14045 Sep 22 23:21:37.733 INFO Crucible Version: Crucible Version: 0.0.1
14046 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14047 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14048 rustc: 1.70.0 stable x86_64-unknown-illumos
14049 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14050 Sep 22 23:21:37.733 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14051 Sep 22 23:21:37.733 INFO Using address: 127.0.0.1:44443, task: main
14052 Sep 22 23:21:37.733 INFO UUID: 1108770c-6fd1-4459-a60b-614b367fa7d4
14053 Sep 22 23:21:37.733 INFO Blocks per extent:5 Total Extents: 2
14054 Sep 22 23:21:37.733 INFO current number of open files limit 65536 is already the maximum
14055 Sep 22 23:21:37.733 INFO Crucible Version: Crucible Version: 0.0.1
14056 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14057 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14058 rustc: 1.70.0 stable x86_64-unknown-illumos
14059 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14060 Sep 22 23:21:37.733 INFO Opened existing region file "/tmp/downstairs-7LycXkyy/region.json"
14061 Sep 22 23:21:37.733 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14062 Sep 22 23:21:37.733 INFO Database read version 1
14063 Sep 22 23:21:37.733 INFO Using address: 127.0.0.1:45992, task: main
14064 Sep 22 23:21:37.733 INFO Database write version 1
14065 Sep 22 23:21:37.733 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
14066 Sep 22 23:21:37.733 INFO UUID: 96640be5-25af-4671-856f-955e28c0fd1e
14067 Sep 22 23:21:37.733 INFO Repair listens on 127.0.0.1:0, task: repair
14068 Sep 22 23:21:37.733 INFO Blocks per extent:5 Total Extents: 2
14069 Sep 22 23:21:37.733 INFO Repair listens on 127.0.0.1:0, task: repair
14070 Sep 22 23:21:37.733 INFO Crucible Version: Crucible Version: 0.0.1
14071 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14072 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14073 rustc: 1.70.0 stable x86_64-unknown-illumos
14074 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14075 Sep 22 23:21:37.733 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37497, task: repair
14076 Sep 22 23:21:37.733 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14077 Sep 22 23:21:37.733 INFO Using address: 127.0.0.1:63880, task: main
14078 Sep 22 23:21:37.733 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37497, task: repair
14079 Sep 22 23:21:37.733 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58240, task: repair
14080 Sep 22 23:21:37.733 INFO listening, local_addr: 127.0.0.1:37497, task: repair
14081 Sep 22 23:21:37.733 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58240, task: repair
14082 Sep 22 23:21:37.733 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
14083 Sep 22 23:21:37.733 INFO listening, local_addr: 127.0.0.1:58240, task: repair
14084 Sep 22 23:21:37.733 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58240, task: repair
14085 Sep 22 23:21:37.733 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37497, task: repair
14086 Sep 22 23:21:37.733 INFO Using repair address: 127.0.0.1:58240, task: main
14087 Sep 22 23:21:37.733 INFO No SSL acceptor configured, task: main
14088 Sep 22 23:21:37.733 INFO Using repair address: 127.0.0.1:37497, task: main
14089 Sep 22 23:21:37.733 INFO No SSL acceptor configured, task: main
14090 Sep 22 23:21:37.733 INFO Repair listens on 127.0.0.1:0, task: repair
14091 Sep 22 23:21:37.733 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
14092 Sep 22 23:21:37.734 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61212, task: repair
14093 Sep 22 23:21:37.734 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61212, task: repair
14094 Sep 22 23:21:37.734 INFO current number of open files limit 65536 is already the maximum
14095 Sep 22 23:21:37.734 INFO listening, local_addr: 127.0.0.1:61212, task: repair
14096 Sep 22 23:21:37.734 INFO UUID: c0634833-e1bc-4dc4-a207-c663e8474e16
14097 Sep 22 23:21:37.734 INFO Blocks per extent:5 Total Extents: 2
14098 Sep 22 23:21:37.734 INFO current number of open files limit 65536 is already the maximum
14099 Sep 22 23:21:37.734 INFO Created new region file "/tmp/downstairs-wFpZaG9t/region.json"
14100 Sep 22 23:21:37.734 INFO Crucible Version: Crucible Version: 0.0.1
14101 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14102 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14103 rustc: 1.70.0 stable x86_64-unknown-illumos
14104 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14105 Sep 22 23:21:37.734 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14106 Sep 22 23:21:37.734 INFO Using address: 127.0.0.1:35164, task: main
14107 Sep 22 23:21:37.734 INFO Created new region file "/tmp/downstairs-ccMKGrRU/region.json"
14108 Sep 22 23:21:37.734 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61212, task: repair
14109 Sep 22 23:21:37.734 INFO Using repair address: 127.0.0.1:61212, task: main
14110 Sep 22 23:21:37.734 INFO No SSL acceptor configured, task: main
14111 Sep 22 23:21:37.734 INFO current number of open files limit 65536 is already the maximum
14112 Sep 22 23:21:37.734 INFO Repair listens on 127.0.0.1:0, task: repair
14113 Sep 22 23:21:37.734 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53445, task: repair
14114 Sep 22 23:21:37.734 INFO Created new region file "/tmp/downstairs-URcgTXmZ/region.json"
14115 Sep 22 23:21:37.734 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53445, task: repair
14116 Sep 22 23:21:37.734 INFO listening, local_addr: 127.0.0.1:53445, task: repair
14117 Sep 22 23:21:37.734 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53445, task: repair
14118 Sep 22 23:21:37.734 INFO Using repair address: 127.0.0.1:53445, task: main
14119 Sep 22 23:21:37.734 INFO No SSL acceptor configured, task: main
14120 Sep 22 23:21:37.735 INFO current number of open files limit 65536 is already the maximum
14121 Sep 22 23:21:37.735 INFO Created new region file "/tmp/downstairs-DB2vwjG7/region.json"
14122 Sep 22 23:21:37.735 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
14123 Sep 22 23:21:37.736 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
14124 Sep 22 23:21:37.736 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
14125 Sep 22 23:21:37.736 INFO current number of open files limit 65536 is already the maximum
14126 Sep 22 23:21:37.736 INFO Opened existing region file "/tmp/downstairs-wFpZaG9t/region.json"
14127 Sep 22 23:21:37.736 INFO Database read version 1
14128 Sep 22 23:21:37.736 INFO Database write version 1
14129 Sep 22 23:21:37.737 INFO current number of open files limit 65536 is already the maximum
14130 Sep 22 23:21:37.737 INFO Opened existing region file "/tmp/downstairs-ccMKGrRU/region.json"
14131 Sep 22 23:21:37.737 INFO Database read version 1
14132 Sep 22 23:21:37.737 INFO Database write version 1
14133 Sep 22 23:21:37.737 INFO current number of open files limit 65536 is already the maximum
14134 Sep 22 23:21:37.737 INFO Opened existing region file "/tmp/downstairs-URcgTXmZ/region.json"
14135 Sep 22 23:21:37.737 INFO Database read version 1
14136 Sep 22 23:21:37.737 INFO Database write version 1
14137 Sep 22 23:21:37.738 INFO UUID: 5b712e05-efd4-443b-8e3d-e394f3713c4b
14138 Sep 22 23:21:37.738 INFO Blocks per extent:5 Total Extents: 2
14139 Sep 22 23:21:37.738 INFO Crucible Version: Crucible Version: 0.0.1
14140 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14141 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14142 rustc: 1.70.0 stable x86_64-unknown-illumos
14143 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14144 Sep 22 23:21:37.738 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14145 Sep 22 23:21:37.738 INFO Using address: 127.0.0.1:57990, task: main
14146 Sep 22 23:21:37.738 INFO Repair listens on 127.0.0.1:0, task: repair
14147 Sep 22 23:21:37.738 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58749, task: repair
14148 Sep 22 23:21:37.738 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58749, task: repair
14149 Sep 22 23:21:37.738 INFO listening, local_addr: 127.0.0.1:58749, task: repair
14150 Sep 22 23:21:37.738 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58749, task: repair
14151 Sep 22 23:21:37.738 INFO Using repair address: 127.0.0.1:58749, task: main
14152 Sep 22 23:21:37.738 INFO No SSL acceptor configured, task: main
14153 Sep 22 23:21:37.739 INFO current number of open files limit 65536 is already the maximum
14154 Sep 22 23:21:37.739 INFO Opened existing region file "/tmp/downstairs-DB2vwjG7/region.json"
14155 Sep 22 23:21:37.739 INFO Database read version 1
14156 Sep 22 23:21:37.739 INFO Database write version 1
14157 Sep 22 23:21:37.739 INFO current number of open files limit 65536 is already the maximum
14158 Sep 22 23:21:37.739 INFO Created new region file "/tmp/downstairs-qX3gXVLO/region.json"
14159 Sep 22 23:21:37.739 INFO UUID: 2d0b2cb1-76dc-4a6c-9791-1330cfcd6cae
14160 Sep 22 23:21:37.739 INFO Blocks per extent:5 Total Extents: 2
14161 Sep 22 23:21:37.739 INFO UUID: 854ff323-c929-4b97-8826-1ca400654147
14162 Sep 22 23:21:37.739 INFO Blocks per extent:5 Total Extents: 2
14163 Sep 22 23:21:37.739 INFO Crucible Version: Crucible Version: 0.0.1
14164 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14165 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14166 rustc: 1.70.0 stable x86_64-unknown-illumos
14167 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14168 Sep 22 23:21:37.739 INFO Crucible Version: Crucible Version: 0.0.1
14169 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14170 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14171 rustc: 1.70.0 stable x86_64-unknown-illumos
14172 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14173 Sep 22 23:21:37.739 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14174 Sep 22 23:21:37.739 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14175 Sep 22 23:21:37.739 INFO Using address: 127.0.0.1:60947, task: main
14176 Sep 22 23:21:37.739 INFO Using address: 127.0.0.1:57484, task: main
14177 Sep 22 23:21:37.740 INFO Repair listens on 127.0.0.1:0, task: repair
14178 Sep 22 23:21:37.740 INFO Repair listens on 127.0.0.1:0, task: repair
14179 Sep 22 23:21:37.740 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52396, task: repair
14180 Sep 22 23:21:37.740 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57392, task: repair
14181 Sep 22 23:21:37.740 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57392, task: repair
14182 Sep 22 23:21:37.740 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52396, task: repair
14183 Sep 22 23:21:37.740 INFO listening, local_addr: 127.0.0.1:52396, task: repair
14184 Sep 22 23:21:37.740 INFO listening, local_addr: 127.0.0.1:57392, task: repair
14185 Sep 22 23:21:37.740 INFO UUID: 9aaa216b-96cd-4a39-8952-f14cf6369014
14186 test test::integration_test_guest_downstairs_unwritten_span_2 ... ok
14187 Sep 22 23:21:37.740 INFO Blocks per extent:5 Total Extents: 2
14188 Sep 22 23:21:37.740 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52396, task: repair
14189 Sep 22 23:21:37.740 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57392, task: repair
14190 Sep 22 23:21:37.740 INFO Using repair address: 127.0.0.1:57392, task: main
14191 Sep 22 23:21:37.740 INFO Using repair address: 127.0.0.1:52396, task: main
14192 Sep 22 23:21:37.740 INFO No SSL acceptor configured, task: main
14193 Sep 22 23:21:37.740 INFO No SSL acceptor configured, task: main
14194 Sep 22 23:21:37.740 INFO Crucible Version: Crucible Version: 0.0.1
14195 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14196 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14197 rustc: 1.70.0 stable x86_64-unknown-illumos
14198 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14199 Sep 22 23:21:37.740 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14200 Sep 22 23:21:37.740 INFO Using address: 127.0.0.1:62068, task: main
14201 Sep 22 23:21:37.741 INFO Repair listens on 127.0.0.1:0, task: repair
14202 Sep 22 23:21:37.741 INFO current number of open files limit 65536 is already the maximum
14203 Sep 22 23:21:37.741 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54442, task: repair
14204 Sep 22 23:21:37.741 INFO current number of open files limit 65536 is already the maximum
14205 Sep 22 23:21:37.741 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54442, task: repair
14206 Sep 22 23:21:37.741 INFO Created new region file "/tmp/downstairs-K70Wpcfy/region.json"
14207 Sep 22 23:21:37.741 INFO current number of open files limit 65536 is already the maximum
14208 Sep 22 23:21:37.741 INFO listening, local_addr: 127.0.0.1:54442, task: repair
14209 Sep 22 23:21:37.741 INFO Created new region file "/tmp/downstairs-wR25ZUX2/region.json"
14210 Sep 22 23:21:37.741 INFO Created new region file "/tmp/downstairs-VaaXtxW5/region.json"
14211 Sep 22 23:21:37.741 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54442, task: repair
14212 Sep 22 23:21:37.741 INFO Using repair address: 127.0.0.1:54442, task: main
14213 Sep 22 23:21:37.741 INFO No SSL acceptor configured, task: main
14214 Sep 22 23:21:37.742 INFO current number of open files limit 65536 is already the maximum
14215 Sep 22 23:21:37.742 INFO current number of open files limit 65536 is already the maximum
14216 Sep 22 23:21:37.742 INFO Opened existing region file "/tmp/downstairs-qX3gXVLO/region.json"
14217 Sep 22 23:21:37.742 INFO Created new region file "/tmp/downstairs-nn01FdgV/region.json"
14218 Sep 22 23:21:37.742 INFO Database read version 1
14219 Sep 22 23:21:37.742 INFO Database write version 1
14220 Sep 22 23:21:37.743 INFO UUID: 890345ba-e71f-4b71-b001-a87d38d96b67
14221 Sep 22 23:21:37.743 INFO Blocks per extent:5 Total Extents: 2
14222 Sep 22 23:21:37.743 INFO Crucible Version: Crucible Version: 0.0.1
14223 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14224 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14225 rustc: 1.70.0 stable x86_64-unknown-illumos
14226 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14227 Sep 22 23:21:37.743 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14228 Sep 22 23:21:37.743 INFO Using address: 127.0.0.1:35001, task: main
14229 Sep 22 23:21:37.743 INFO Repair listens on 127.0.0.1:0, task: repair
14230 test test::integration_test_guest_downstairs ... ok
14231 Sep 22 23:21:37.743 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54304, task: repair
14232 Sep 22 23:21:37.743 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54304, task: repair
14233 Sep 22 23:21:37.744 INFO listening, local_addr: 127.0.0.1:54304, task: repair
14234 Sep 22 23:21:37.744 INFO current number of open files limit 65536 is already the maximum
14235 Sep 22 23:21:37.744 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54304, task: repair
14236 Sep 22 23:21:37.744 INFO Using repair address: 127.0.0.1:54304, task: main
14237 Sep 22 23:21:37.744 INFO No SSL acceptor configured, task: main
14238 Sep 22 23:21:37.744 INFO Created new region file "/tmp/downstairs-8BOPdUGn/region.json"
14239 Sep 22 23:21:37.745 INFO Upstairs starts
14240 Sep 22 23:21:37.745 INFO Crucible Version: BuildInfo {
14241 version: "0.0.1",
14242 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
14243 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
14244 git_branch: "main",
14245 rustc_semver: "1.70.0",
14246 rustc_channel: "stable",
14247 rustc_host_triple: "x86_64-unknown-illumos",
14248 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
14249 cargo_triple: "x86_64-unknown-illumos",
14250 debug: true,
14251 opt_level: 0,
14252 }
14253 Sep 22 23:21:37.745 INFO Upstairs <-> Downstairs Message Version: 4
14254 Sep 22 23:21:37.745 INFO Crucible stats registered with UUID: 7743f09f-f465-4fba-892d-457fc58d9abf
14255 Sep 22 23:21:37.745 INFO Crucible 7743f09f-f465-4fba-892d-457fc58d9abf has session id: 9ebddc35-6adf-4dff-a4c5-9317a93426bd
14256 The guest has requested activation
14257 Sep 22 23:21:37.745 INFO listening on 127.0.0.1:0, task: main
14258 Sep 22 23:21:37.745 INFO listening on 127.0.0.1:0, task: main
14259 Sep 22 23:21:37.745 INFO current number of open files limit 65536 is already the maximum
14260 Sep 22 23:21:37.745 INFO Opened existing region file "/tmp/downstairs-K70Wpcfy/region.json"
14261 Sep 22 23:21:37.745 INFO Database read version 1
14262 Sep 22 23:21:37.745 INFO listening on 127.0.0.1:0, task: main
14263 Sep 22 23:21:37.745 INFO Database write version 1
14264 Sep 22 23:21:37.745 INFO [0] connecting to 127.0.0.1:45992, looper: 0
14265 Sep 22 23:21:37.745 INFO current number of open files limit 65536 is already the maximum
14266 Sep 22 23:21:37.745 INFO Opened existing region file "/tmp/downstairs-VaaXtxW5/region.json"
14267 Sep 22 23:21:37.745 INFO Database read version 1
14268 Sep 22 23:21:37.745 INFO Database write version 1
14269 Sep 22 23:21:37.745 INFO [1] connecting to 127.0.0.1:57990, looper: 1
14270 Sep 22 23:21:37.745 INFO current number of open files limit 65536 is already the maximum
14271 Sep 22 23:21:37.745 INFO Opened existing region file "/tmp/downstairs-wR25ZUX2/region.json"
14272 Sep 22 23:21:37.745 INFO Database read version 1
14273 Sep 22 23:21:37.745 INFO [2] connecting to 127.0.0.1:35001, looper: 2
14274 Sep 22 23:21:37.745 INFO Database write version 1
14275 Sep 22 23:21:37.745 INFO up_listen starts, task: up_listen
14276 Sep 22 23:21:37.745 INFO Wait for all three downstairs to come online
14277 Sep 22 23:21:37.745 INFO Flush timeout: 0.5
14278 Sep 22 23:21:37.745 INFO current number of open files limit 65536 is already the maximum
14279 Sep 22 23:21:37.746 INFO 7743f09f-f465-4fba-892d-457fc58d9abf active request set
14280 Sep 22 23:21:37.746 INFO Opened existing region file "/tmp/downstairs-nn01FdgV/region.json"
14281 Sep 22 23:21:37.746 INFO Database read version 1
14282 Sep 22 23:21:37.746 INFO Database write version 1
14283 Sep 22 23:21:37.746 INFO accepted connection from 127.0.0.1:62688, task: main
14284 Sep 22 23:21:37.746 INFO accepted connection from 127.0.0.1:38002, task: main
14285 Sep 22 23:21:37.746 INFO accepted connection from 127.0.0.1:38970, task: main
14286 Sep 22 23:21:37.746 INFO [0] 7743f09f-f465-4fba-892d-457fc58d9abf looper connected, looper: 0
14287 Sep 22 23:21:37.746 INFO [0] Proc runs for 127.0.0.1:45992 in state New
14288 Sep 22 23:21:37.746 INFO UUID: 9ea11016-ca01-4f8d-8100-108f556909ca
14289 Sep 22 23:21:37.746 INFO Blocks per extent:5 Total Extents: 2
14290 Sep 22 23:21:37.746 INFO Crucible Version: Crucible Version: 0.0.1
14291 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14292 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14293 rustc: 1.70.0 stable x86_64-unknown-illumos
14294 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14295 Sep 22 23:21:37.746 INFO [1] 7743f09f-f465-4fba-892d-457fc58d9abf looper connected, looper: 1
14296 Sep 22 23:21:37.746 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14297 Sep 22 23:21:37.746 INFO Using address: 127.0.0.1:54777, task: main
14298 Sep 22 23:21:37.746 INFO [1] Proc runs for 127.0.0.1:57990 in state New
14299 Sep 22 23:21:37.746 INFO [2] 7743f09f-f465-4fba-892d-457fc58d9abf looper connected, looper: 2
14300 Sep 22 23:21:37.746 INFO [2] Proc runs for 127.0.0.1:35001 in state New
14301 Sep 22 23:21:37.746 INFO UUID: 62fc2f22-4d19-4964-a68a-fe053dd165a7
14302 Sep 22 23:21:37.747 INFO Blocks per extent:5 Total Extents: 2
14303 Sep 22 23:21:37.747 INFO Repair listens on 127.0.0.1:0, task: repair
14304 Sep 22 23:21:37.747 INFO UUID: 69d03179-d586-4376-83f6-52e7dee79d82
14305 Sep 22 23:21:37.747 INFO Crucible Version: Crucible Version: 0.0.1
14306 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14307 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14308 rustc: 1.70.0 stable x86_64-unknown-illumos
14309 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14310 Sep 22 23:21:37.747 INFO UUID: a32c7e82-e087-4524-b977-a5b003758c5f
14311 Sep 22 23:21:37.747 INFO Blocks per extent:5 Total Extents: 2
14312 Sep 22 23:21:37.747 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14313 Sep 22 23:21:37.747 INFO Connection request from 7743f09f-f465-4fba-892d-457fc58d9abf with version 4, task: proc
14314 Sep 22 23:21:37.747 INFO Blocks per extent:5 Total Extents: 2
14315 Sep 22 23:21:37.747 INFO Using address: 127.0.0.1:51595, task: main
14316 Sep 22 23:21:37.747 INFO upstairs UpstairsConnection { upstairs_id: 7743f09f-f465-4fba-892d-457fc58d9abf, session_id: 3175f1fa-0abb-4faa-9a83-0df20993aaa9, gen: 1 } connected, version 4, task: proc
14317 Sep 22 23:21:37.747 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58414, task: repair
14318 Sep 22 23:21:37.747 INFO Crucible Version: Crucible Version: 0.0.1
14319 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14320 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14321 rustc: 1.70.0 stable x86_64-unknown-illumos
14322 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14323 Sep 22 23:21:37.747 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58414, task: repair
14324 Sep 22 23:21:37.747 INFO Crucible Version: Crucible Version: 0.0.1
14325 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14326 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14327 rustc: 1.70.0 stable x86_64-unknown-illumos
14328 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14329 Sep 22 23:21:37.747 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14330 Sep 22 23:21:37.747 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14331 Sep 22 23:21:37.747 INFO Using address: 127.0.0.1:42987, task: main
14332 Sep 22 23:21:37.747 INFO listening, local_addr: 127.0.0.1:58414, task: repair
14333 Sep 22 23:21:37.747 INFO Using address: 127.0.0.1:35178, task: main
14334 Sep 22 23:21:37.747 INFO Connection request from 7743f09f-f465-4fba-892d-457fc58d9abf with version 4, task: proc
14335 Sep 22 23:21:37.747 INFO upstairs UpstairsConnection { upstairs_id: 7743f09f-f465-4fba-892d-457fc58d9abf, session_id: 3175f1fa-0abb-4faa-9a83-0df20993aaa9, gen: 1 } connected, version 4, task: proc
14336 Sep 22 23:21:37.747 INFO Connection request from 7743f09f-f465-4fba-892d-457fc58d9abf with version 4, task: proc
14337 Sep 22 23:21:37.747 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58414, task: repair
14338 Sep 22 23:21:37.747 INFO upstairs UpstairsConnection { upstairs_id: 7743f09f-f465-4fba-892d-457fc58d9abf, session_id: 3175f1fa-0abb-4faa-9a83-0df20993aaa9, gen: 1 } connected, version 4, task: proc
14339 Sep 22 23:21:37.747 INFO Using repair address: 127.0.0.1:58414, task: main
14340 Sep 22 23:21:37.747 INFO No SSL acceptor configured, task: main
14341 Sep 22 23:21:37.747 INFO Repair listens on 127.0.0.1:0, task: repair
14342 Sep 22 23:21:37.747 INFO current number of open files limit 65536 is already the maximum
14343 Sep 22 23:21:37.747 INFO Opened existing region file "/tmp/downstairs-8BOPdUGn/region.json"
14344 Sep 22 23:21:37.747 INFO Database read version 1
14345 Sep 22 23:21:37.747 INFO Database write version 1
14346 Sep 22 23:21:37.747 INFO Repair listens on 127.0.0.1:0, task: repair
14347 Sep 22 23:21:37.747 INFO Repair listens on 127.0.0.1:0, task: repair
14348 Sep 22 23:21:37.747 INFO [0] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) New New New ds_transition to WaitActive
14349 Sep 22 23:21:37.747 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35253, task: repair
14350 Sep 22 23:21:37.747 INFO [0] Transition from New to WaitActive
14351 Sep 22 23:21:37.747 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35253, task: repair
14352 Sep 22 23:21:37.747 INFO [0] client is_active_req TRUE, promote! session 3175f1fa-0abb-4faa-9a83-0df20993aaa9
14353 Sep 22 23:21:37.747 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61848, task: repair
14354 Sep 22 23:21:37.747 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42081, task: repair
14355 Sep 22 23:21:37.747 INFO listening, local_addr: 127.0.0.1:35253, task: repair
14356 Sep 22 23:21:37.747 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61848, task: repair
14357 Sep 22 23:21:37.747 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42081, task: repair
14358 Sep 22 23:21:37.747 INFO listening, local_addr: 127.0.0.1:61848, task: repair
14359 Sep 22 23:21:37.747 INFO listening, local_addr: 127.0.0.1:42081, task: repair
14360 Sep 22 23:21:37.747 INFO [1] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) WaitActive New New ds_transition to WaitActive
14361 Sep 22 23:21:37.747 INFO [1] Transition from New to WaitActive
14362 Sep 22 23:21:37.747 INFO [1] client is_active_req TRUE, promote! session 3175f1fa-0abb-4faa-9a83-0df20993aaa9
14363 Sep 22 23:21:37.747 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35253, task: repair
14364 Sep 22 23:21:37.747 INFO [2] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) WaitActive WaitActive New ds_transition to WaitActive
14365 Sep 22 23:21:37.747 INFO Using repair address: 127.0.0.1:35253, task: main
14366 Sep 22 23:21:37.747 INFO [2] Transition from New to WaitActive
14367 Sep 22 23:21:37.747 INFO No SSL acceptor configured, task: main
14368 Sep 22 23:21:37.747 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61848, task: repair
14369 Sep 22 23:21:37.747 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42081, task: repair
14370 Sep 22 23:21:37.747 INFO [2] client is_active_req TRUE, promote! session 3175f1fa-0abb-4faa-9a83-0df20993aaa9
14371 Sep 22 23:21:37.747 INFO Using repair address: 127.0.0.1:42081, task: main
14372 Sep 22 23:21:37.747 INFO Using repair address: 127.0.0.1:61848, task: main
14373 Sep 22 23:21:37.747 INFO No SSL acceptor configured, task: main
14374 Sep 22 23:21:37.747 INFO No SSL acceptor configured, task: main
14375 Sep 22 23:21:37.748 INFO UpstairsConnection { upstairs_id: 7743f09f-f465-4fba-892d-457fc58d9abf, session_id: 3175f1fa-0abb-4faa-9a83-0df20993aaa9, gen: 1 } is now active (read-write)
143762023-09-22T23:21:37.748ZINFOcrucible: Upstairs starts
14377 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0Sep 22 23:21:37.748 INFO UpstairsConnection { upstairs_id: 7743f09f-f465-4fba-892d-457fc58d9abf, session_id: 3175f1fa-0abb-4faa-9a83-0df20993aaa9, gen: 1 } is now active (read-write)
14378 ,"name":"crucible","level":30The guest has requested activation
14379 ,"time":"2023-09-22T23:21:37.7482556Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
143802023-09-22T23:21:37.748ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
14381 {"msg":"Sep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14382 Crucible stats registered with UUID: d1885708-091b-4eda-a722-18751f0474d0","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.748332484Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14383 {"msg":"Crucible d1885708-091b-4eda-a722-18751f0474d0 has session id: 159bcbd6-d767-46d9-9279-e00f10ab7b2e","v":0Sep 22 23:21:37.748 INFO UpstairsConnection { upstairs_id: 7743f09f-f465-4fba-892d-457fc58d9abf, session_id: 3175f1fa-0abb-4faa-9a83-0df20993aaa9, gen: 1 } is now active (read-write)
14384 Sep 22 23:21:37.748 INFO current number of open files limit 65536 is already the maximum
14385 ,Sep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14386 "name":"crucible","level":30,"time":"2023-09-22T23:21:37.748480875Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14387 Sep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14388 Sep 22 23:21:37.748 INFO UUID: 17366ba3-9d52-4dce-a4fd-d4657621aec9
14389 Sep 22 23:21:37.748 INFO Created new region file "/tmp/downstairs-Ztyww1CP/region.json"
14390 Sep 22 23:21:37.748 INFO Blocks per extent:5 Total Extents: 2
143912023-09-22T23:21:37.748ZINFOcrucible: [0] connecting to 127.0.0.1:63880 looper = 0
14392 {Sep 22 23:21:37.748 INFO Crucible Version: Crucible Version: 0.0.1
14393 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14394 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14395 rustc: 1.70.0 stable x86_64-unknown-illumos
14396 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14397 "msg":"Upstairs starts","v":0,"name":"crucible","level":30Sep 22 23:21:37.748 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14398 ,"time":"2023-09-22T23:21:37.748665328Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.748 INFO Using address: 127.0.0.1:61806, task: main
14399 ,"pid":4769}
144002023-09-22T23:21:37.748ZINFOcrucible: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
14401 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30The guest has requested activation
14402 {,"time":"2023-09-22T23:21:37.748777338Z"","msghostname""::""Upstairs starts"ip-10-150-1-55.us-west-2.compute.internal,"",v"":pid":04769,"name}":
14403 "crucible","level"{:30"msg":"Crucible stats registered with UUID: c8c6d6f5-1396-4842-801b-a86e7e53a2f6","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.748809806Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14404 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\nSep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14405 git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level":30The guest has requested activation
14406 Sep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14407 ,"time":"2023-09-22T23:21:37.748818979Z","hostname":","time":"ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:21:37.748886624Z:"4769,"hostname"}:"
14408 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14409 {{"msg":""msg":"Sep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14410 Sep 22 23:21:37.748 INFO listening on 127.0.0.1:0, task: main
14411 Upstairs <-> Downstairs Message Version: 4Crucible c8c6d6f5-1396-4842-801b-a86e7e53a2f6 has session id: 12f55a26-57d6-442d-bf1e-32b5d4058f11"",,""vv""::00,,""namename""::""cruciblecrucible"",,""levellevel""::3030,,""timetime""::""2023-09-22T23:21:37.748993689Z2023-09-22T23:21:37.748993685Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:":47694769Sep 22 23:21:37.748 INFO Repair listens on 127.0.0.1:0, task: repair
14412 Sep 22 23:21:37.749 INFO listening on 127.0.0.1:0, task: main
14413 {"msg":"Sep 22 23:21:37.749 INFO [0] downstairs client at 127.0.0.1:45992 has UUID 1108770c-6fd1-4459-a60b-614b367fa7d4
14414 [1] connecting to 127.0.0.1:57484","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.749057175Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"Sep 22 23:21:37.749 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1108770c-6fd1-4459-a60b-614b367fa7d4, encrypted: true, database_read_version: 1, database_write_version: 1 }
14415 1"}Sep 22 23:21:37.749 INFO listening on 127.0.0.1:0, task: main
14416 }
14417 {}
14418 "Sep 22 23:21:37.749 INFO 7743f09f-f465-4fba-892d-457fc58d9abf WaitActive WaitActive WaitActive
14419 {msg"Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:34175, task: main
14420 Sep 22 23:21:37.749 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57954, task: repair
14421 ":msg"":"[0] connecting to 127.0.0.1:44443","v":Crucible stats registered with UUID: c0eead19-4586-47fd-9018-7a0ce84810090",","name"v:"":0crucible",",name"":"level":crucible"30,"level":30,,""timetime""::""2023-09-22T23:21:37.749174306Z2023-09-22T23:21:37.749173045Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47694769,}"
14422 looper":"0"}
14423 {Sep 22 23:21:37.749 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57954, task: repair
14424 {"msg"":msg"":"[1] connecting to 127.0.0.1:60947","v":0,Crucible c0eead19-4586-47fd-9018-7a0ce8481009 has session id: cbe2288b-c8af-400a-a089-f27ee961a317""name",:""v":crucible0",","name"level:"":30crucible","level":30
14425 ,,""timetime""::""2023-09-22T23:21:37.749223501ZSep 22 23:21:37.749 INFO [1] downstairs client at 127.0.0.1:57990 has UUID 5b712e05-efd4-443b-8e3d-e394f3713c4b
14426 {"2023-09-22T23:21:37.749221765Z",","hostname"hostname:"":""msgip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"""pid,"":Sep 22 23:21:37.749 INFO listening, local_addr: 127.0.0.1:57954, task: repair
14427 :"pid4769":4769}
14428 ,[2] connecting to 127.0.0.1:54777""{looper":"1","}msg"
14429 :Sep 22 23:21:37.749 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5b712e05-efd4-443b-8e3d-e394f3713c4b, encrypted: true, database_read_version: 1, database_write_version: 1 }
14430 "v"":0[0] connecting to 127.0.0.1:35164",","namev""::0","cruciblename"":",crucible"","level"level"::3030,"time":"2023-09-22T23:21:37.749291015Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"0",}"
14431 time":"2023-09-22T23:21:37.74929182Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal",[1] connecting to 127.0.0.1:62068""pid",:Sep 22 23:21:37.749 INFO 7743f09f-f465-4fba-892d-457fc58d9abf WaitActive WaitActive WaitActive
14432 4769,"looper":"2"}
14433 {"msg":"up_listen starts","v":0,"name":"crucible","level":Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:42478, task: main
14434 "v":0,"name":"crucible","level":3030Sep 22 23:21:37.749 INFO [2] downstairs client at 127.0.0.1:35001 has UUID 890345ba-e71f-4b71-b001-a87d38d96b67
14435 ,"time":"2023-09-22T23:21:37.749407381Z","hostname":"Sep 22 23:21:37.749 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57954, task: repair
14436 ip-10-150-1-55.us-west-2.compute.internal",,""pid":time"4769:","2023-09-22T23:21:37.749415041Zlooper":Sep 22 23:21:37.749 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 890345ba-e71f-4b71-b001-a87d38d96b67, encrypted: true, database_read_version: 1, database_write_version: 1 }
14437 {Sep 22 23:21:37.749 INFO Using repair address: 127.0.0.1:57954, task: main
14438 "msg":"[2] connecting to 127.0.0.1:51595","v":0,"name":"crucible","level":30","hostname":"Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:52416, task: main
14439 Sep 22 23:21:37.749 INFO No SSL acceptor configured, task: main
14440 ,"Sep 22 23:21:37.749 INFO 7743f09f-f465-4fba-892d-457fc58d9abf WaitActive WaitActive WaitActive
14441 time":"2023-09-22T23:21:37.74961871Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"ip-10-150-1-55.us-west-2.compute.internallooper":""2","}pid"
14442 :4769,"task":"{up_listen"}
14443 "msg":"up_listen starts","v":0{,"name":"crucible","level"":msg":30"Wait for all three downstairs to come online","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.749714386Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"time":,""task":"2023-09-22T23:21:37.749723584Z"up_listen"},"
14444 hostname":"Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:40428, task: main
14445 ip-10-150-1-55.us-west-2.compute.internal",{"pid":4769}"
14446 msg":"Wait for all three downstairs to come online"{,"v":0,""name"msg"::""crucible"Flush timeout: 0.5",","level"v"::300,"name":"crucible","level":30,"time":","time":2023-09-22T23:21:37.749779533Z"","2023-09-22T23:21:37.749783441Z"hostname":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4769","pid"}:
14447 4769}
14448 {{"msg":""Flush timeout: 0.5"msg":,""v":Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:64171, task: main
14449 d1885708-091b-4eda-a722-18751f0474d0 active request setSep 22 23:21:37.749 INFO Connection request from d1885708-091b-4eda-a722-18751f0474d0 with version 4, task: proc
14450 0,Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:48626, task: main
14451 "","name"v"::"0crucible",","name"level"::"30crucible","level":30Sep 22 23:21:37.749 INFO upstairs UpstairsConnection { upstairs_id: d1885708-091b-4eda-a722-18751f0474d0, session_id: 39e89e28-13fd-421b-bfea-f7360e89a8c3, gen: 1 } connected, version 4, task: proc
14452 ,","time":time":""2023-09-22T23:21:37.749858407Z"2023-09-22T23:21:37.749860807Z",","hostname"hostname"::""ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.749 INFO Current flush_numbers [0..12]: [0, 0]
14453 ,"ip-10-150-1-55.us-west-2.compute.internalpid"":,"4769pid":4769}
14454 }
14455 {"{msg":""c8c6d6f5-1396-4842-801b-a86e7e53a2f6 active request set"msg":","v":0,"[1] d1885708-091b-4eda-a722-18751f0474d0 looper connected"name"Sep 22 23:21:37.749 INFO accepted connection from 127.0.0.1:49225, task: main
14456 ,:""crucible"v",:"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:21:37.749930615Z",","hostname"time"::""2023-09-22T23:21:37.749934051Z"ip-10-150-1-55.us-west-2.compute.internal",",pid"":hostname":4769"}
14457 ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.749 INFO Connection request from d1885708-091b-4eda-a722-18751f0474d0 with version 4, task: proc
14458 ,{""pid"msg"::4769","looper":"1"[0] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 looper connected"}
14459 ,{Sep 22 23:21:37.749 INFO current number of open files limit 65536 is already the maximum
14460 ""msg"Sep 22 23:21:37.749 INFO upstairs UpstairsConnection { upstairs_id: d1885708-091b-4eda-a722-18751f0474d0, session_id: 39e89e28-13fd-421b-bfea-f7360e89a8c3, gen: 1 } connected, version 4, task: proc
14461 v:"":[1] Proc runs for 127.0.0.1:57484 in state New"0,,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.75002506Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14462 "name":"crucible"{,"level":"30msg":"[0] d1885708-091b-4eda-a722-18751f0474d0 looper connected","v":0,"name":"crucible","level":30","time":1"","2023-09-22T23:21:37.750059687Z"time"},"
14463 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"0"}
14464 {:{""msg":"2023-09-22T23:21:37.75005447Z""[0] Proc runs for 127.0.0.1:63880 in state New"msg,"":,""v"hostname":[2] connecting to 127.0.0.1:42987"0:,",""namev""ip-10-150-1-55.us-west-2.compute.internal":,:0",""pid"crucible"name:"4769:,"",level":"30looper":"0"Sep 22 23:21:37.750 INFO Connection request from d1885708-091b-4eda-a722-18751f0474d0 with version 4, task: proc
14465 Sep 22 23:21:37.750 INFO Created new region file "/tmp/downstairs-Ytj1TnsI/region.json"
14466 crucible,",""level":30Sep 22 23:21:37.750 INFO upstairs UpstairsConnection { upstairs_id: d1885708-091b-4eda-a722-18751f0474d0, session_id: 39e89e28-13fd-421b-bfea-f7360e89a8c3, gen: 1 } connected, version 4, task: proc
14467 ,"time":"2023-09-22T23:21:37.750137091Z","hostname":"time":"2023-09-22T23:21:37.750113511Z"ip-10-150-1-55.us-west-2.compute.internal",",hostname"":"pid":4769ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"}looper":
14468 "2"}
14469 {{"msg":"up_listen starts","v":0,"name":"crucible","level":30,"time":""msg":"2023-09-22T23:21:37.750197804Z","hostname":"[2] d1885708-091b-4eda-a722-18751f0474d0 looper connected","ip-10-150-1-55.us-west-2.compute.internalv"":,"0pid",:"4769name":"crucible",","task"level"::"30up_listen"}
14470 Sep 22 23:21:37.750 INFO accepted connection from 127.0.0.1:59580, task: main
14471 {"msg":","Wait for all three downstairs to come online"time":,""v":02023-09-22T23:21:37.750232226Z","name",:""crucible"hostname",}:""
14472 ip-10-150-1-55.us-west-2.compute.internal"level",:"30pid":4769{,"looper":"2"}
14473 "msg":"{[0] Proc runs for 127.0.0.1:44443 in state New",,"""timev"":msg"0:,"":"name"2023-09-22T23:21:37.750264958Z":"[2] Proc runs for 127.0.0.1:54777 in state New"crucible",,,"""hostname"level":v"::30"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pid"level"::476930}
14474 {"msg":"Flush timeout: 0.5","v":0,,""name":"time"crucible":,,"""level"time2023-09-22T23:21:37.750299652Z:":","hostname":Sep 22 23:21:37.750 INFO Downstairs has completed Negotiation, task: proc
14475 ""ip-10-150-1-55.us-west-2.compute.internal","pid"2023-09-22T23:21:37.750306628Z":4769,"}hostname"
14476 :{"msg":"[1] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 looper connected","v":0,"name":"crucible","level":30"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14477 ,"time":"2023-09-22T23:21:37.75045883Z30"{,"hostname":"ip-10-150-1-55.us-west-2.compute.internal"","msg":"pid":4769,"looper":"1"}
14478 [1] d1885708-091b-4eda-a722-18751f0474d0 (39e89e28-13fd-421b-bfea-f7360e89a8c3) New New New ds_transition to WaitActive","v":0{,"name":",crucible"",""leveltime"msg"::""":302023-09-22T23:21:37.750483293Z"[1] Proc runs for 127.0.0.1:60947 in state New",","hostname"v"::"0,"name":"ip-10-150-1-55.us-west-2.compute.internal"crucible",","pid"level"::476930}
14479 ,"time":"2023-09-22T23:21:37.750511179Z"{,"hostname":""ip-10-150-1-55.us-west-2.compute.internal"msg,"":pid"":,4769"time"}:c0eead19-4586-47fd-9018-7a0ce8481009 active request set""
14480 2023-09-22T23:21:37.750523708Z","v",:"{0hostname",:"""name":msg"ip-10-150-1-55.us-west-2.compute.internal"":crucible","",pid"[1] Transition from New to WaitActive":"4769,level"}v"
14481 "::030,"name":"{crucible","level":30"msg":"[2] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 looper connected","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:21:37.750575835Z"time":","hostname":2023-09-22T23:21:37.750579635Z"","hostname":ip-10-150-1-55.us-west-2.compute.internal"","pid":4769ip-10-150-1-55.us-west-2.compute.internal","}pid"
14482 Sep 22 23:21:37.750 INFO Connection request from c8c6d6f5-1396-4842-801b-a86e7e53a2f6 with version 4, task: proc
14483 Sep 22 23:21:37.750 INFO UpstairsConnection { upstairs_id: d1885708-091b-4eda-a722-18751f0474d0, session_id: 39e89e28-13fd-421b-bfea-f7360e89a8c3, gen: 1 } is now active (read-write)
14484 Sep 22 23:21:37.750 INFO Current flush_numbers [0..12]: [0, 0]
14485 Sep 22 23:21:37.750 INFO upstairs UpstairsConnection { upstairs_id: c8c6d6f5-1396-4842-801b-a86e7e53a2f6, session_id: ddd3f126-3dca-46a7-9147-f6bc461d9852, gen: 1 } connected, version 4, task: proc
14486 :4769}
14487 ,"time":{"2023-09-22T23:21:37.750590231Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769[1] client is_active_req TRUE, promote! session 39e89e28-13fd-421b-bfea-f7360e89a8c3",",looper"":"v"2":}0
14488 ,"name":"crucible","level":{30"msg":"[2] Proc runs for 127.0.0.1:51595 in state New","v":0,"name":"crucible","level":30Sep 22 23:21:37.750 INFO Connection request from c8c6d6f5-1396-4842-801b-a86e7e53a2f6 with version 4, task: proc
14489 ,"time":"2023-09-22T23:21:37.750738592Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14490 ,"time"Sep 22 23:21:37.750 INFO upstairs UpstairsConnection { upstairs_id: c8c6d6f5-1396-4842-801b-a86e7e53a2f6, session_id: ddd3f126-3dca-46a7-9147-f6bc461d9852, gen: 1 } connected, version 4, task: proc
14491 {:""msg":"2023-09-22T23:21:37.75075075Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"[0] d1885708-091b-4eda-a722-18751f0474d0 (39e89e28-13fd-421b-bfea-f7360e89a8c3) New WaitActive New ds_transition to WaitActive",",pid"":v4769":0},
14492 Sep 22 23:21:37.750 INFO UpstairsConnection { upstairs_id: d1885708-091b-4eda-a722-18751f0474d0, session_id: 39e89e28-13fd-421b-bfea-f7360e89a8c3, gen: 1 } is now active (read-write)
14493 "name":"crucible","level":30,"time":"2023-09-22T23:21:37.75081321Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14494 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.750855452Z","hostname":"Sep 22 23:21:37.750 INFO accepted connection from 127.0.0.1:56561, task: main
14495 Sep 22 23:21:37.750 INFO Downstairs has completed Negotiation, task: proc
14496 ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.750 INFO Connection request from c8c6d6f5-1396-4842-801b-a86e7e53a2f6 with version 4, task: proc
14497 ,"pid":4769}
14498 {"msg":"[0] client is_active_req TRUE, promote! session 39e89e28-13fd-421b-bfea-f7360e89a8c3","v":0,"name":"crucible","level":30Sep 22 23:21:37.750 INFO upstairs UpstairsConnection { upstairs_id: c8c6d6f5-1396-4842-801b-a86e7e53a2f6, session_id: ddd3f126-3dca-46a7-9147-f6bc461d9852, gen: 1 } connected, version 4, task: proc
14499 Sep 22 23:21:37.750 INFO UpstairsConnection { upstairs_id: d1885708-091b-4eda-a722-18751f0474d0, session_id: 39e89e28-13fd-421b-bfea-f7360e89a8c3, gen: 1 } is now active (read-write)
14500 ,"time":"2023-09-22T23:21:37.750909784Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
145012023-09-22T23:21:37.750ZINFOcrucible: [2] d1885708-091b-4eda-a722-18751f0474d0 (39e89e28-13fd-421b-bfea-f7360e89a8c3) WaitActive WaitActive New ds_transition to WaitActive
145022023-09-22T23:21:37.750ZINFOcrucible: [2] Transition from New to WaitActive
145032023-09-22T23:21:37.751ZINFOcrucible: [2] client is_active_req TRUE, promote! session 39e89e28-13fd-421b-bfea-f7360e89a8c3
14504 Sep 22 23:21:37.751 INFO Current flush_numbers [0..12]: [0, 0]
14505 {{"msg":""msg":"[0] c0eead19-4586-47fd-9018-7a0ce8481009 looper connected"[0] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 (ddd3f126-3dca-46a7-9147-f6bc461d9852) New New New ds_transition to WaitActive,""v":0,,""name"v"::"0crucible",",name"":level"":crucible"30,"level":30,"time":"2023-09-22T23:21:37.751118431Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","timepid""::4769","2023-09-22T23:21:37.751120827Z"looper":",0""}hostname":
14506 "ip-10-150-1-55.us-west-2.compute.internal","{pid":4769"msg"}:"
14507 [0] Proc runs for 127.0.0.1:35164 in state New","v":0,"name":"{crucible","level":30"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.751175415Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14508 ,"time":"{2023-09-22T23:21:37.751184991Z",""hostname":"msg":"ip-10-150-1-55.us-west-2.compute.internal","pid"[1] c0eead19-4586-47fd-9018-7a0ce8481009 looper connected":,4769"v":0},
14509 "name":"crucible","level":{30"msg":"[0] client is_active_req TRUE, promote! session ddd3f126-3dca-46a7-9147-f6bc461d9852","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.751224729Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"1"}
14510 ,"time":"{2023-09-22T23:21:37.751238668Z","hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal","[1] Proc runs for 127.0.0.1:62068 in state Newpid"":4769,"v":0},
14511 "name":"crucible","level{":30"msg":"Sep 22 23:21:37.751 INFO Downstairs has completed Negotiation, task: proc
14512 [1] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 (ddd3f126-3dca-46a7-9147-f6bc461d9852) WaitActive New New ds_transition to WaitActive",",v"":time"0:,""name":"crucible"2023-09-22T23:21:37.751277694Z",",level"":hostname30":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14513 ,"time":"2023-09-22T23:21:37.751303972Z","Sep 22 23:21:37.751 INFO current number of open files limit 65536 is already the maximum
14514 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14515 {"msg":"[1] Transition from New to WaitActive","v":0,"Sep 22 23:21:37.751 INFO Opened existing region file "/tmp/downstairs-Ztyww1CP/region.json"
14516 name":"crucible","level":30Sep 22 23:21:37.751 INFO Database read version 1
14517 ,"time":"2023-09-22T23:21:37.751361573Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14518 Sep 22 23:21:37.751 INFO Database write version 1
14519 {"msg":"[1] client is_active_req TRUE, promote! session ddd3f126-3dca-46a7-9147-f6bc461d9852","v"Sep 22 23:21:37.751 INFO UpstairsConnection { upstairs_id: c8c6d6f5-1396-4842-801b-a86e7e53a2f6, session_id: ddd3f126-3dca-46a7-9147-f6bc461d9852, gen: 1 } is now active (read-write)
14520 :0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.751414862Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14521 {"msg":"[2] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 (ddd3f126-3dca-46a7-9147-f6bc461d9852) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30Sep 22 23:21:37.751 INFO [0] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
14522 ,"time":"2023-09-22T23:21:37.751462361Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.751 INFO [0] Transition from WaitActive to WaitQuorum
14523 {}
14524 "msg":"{[1] downstairs client at 127.0.0.1:57484 has UUID 854ff323-c929-4b97-8826-1ca400654147""Sep 22 23:21:37.751 WARN [0] new RM replaced this: None
14525 msg",:""v":0[2] Transition from New to WaitActive,""name,":""crucible"v",:"0level",":name":30"Sep 22 23:21:37.751 INFO UpstairsConnection { upstairs_id: c8c6d6f5-1396-4842-801b-a86e7e53a2f6, session_id: ddd3f126-3dca-46a7-9147-f6bc461d9852, gen: 1 } is now active (read-write)
14526 crucible","level":30{"msg":"[2] c0eead19-4586-47fd-9018-7a0ce8481009 looper connected","v,"":time"0:","name":"2023-09-22T23:21:37.751537713Z,crucible""",,""time"level"hostname":::30""2023-09-22T23:21:37.751527448Zip-10-150-1-55.us-west-2.compute.internal"","pid":,4769"hostname"}:
14527 "ip-10-150-1-55.us-west-2.compute.internal"{,"pid":4769"msg":"}
14528 [2] client is_active_req TRUE, promote! session ddd3f126-3dca-46a7-9147-f6bc461d9852","v":0,"name":"{crucible",","time"level"::"30"msg"2023-09-22T23:21:37.751573313Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,","looper"time:"":[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 854ff323-c929-4b97-8826-1ca400654147, encrypted: true, database_read_version: 1, database_write_version: 1 }"2"","}v"
14529 2023-09-22T23:21:37.751591339Z":,0","hostname{"name"::"""cruciblemsg":ip-10-150-1-55.us-west-2.compute.internal""",",[2] Proc runs for 127.0.0.1:42987 in state Newpid"":"4769,level"}"
14530 v"::300,"name":"crucible","level":30,"time":","time":2023-09-22T23:21:37.751631938Z"","hostname"2023-09-22T23:21:37.751628125Z:""Sep 22 23:21:37.751 INFO UpstairsConnection { upstairs_id: c8c6d6f5-1396-4842-801b-a86e7e53a2f6, session_id: ddd3f126-3dca-46a7-9147-f6bc461d9852, gen: 1 } is now active (read-write)
14531 ip-10-150-1-55.us-west-2.compute.internal",,""hostname"pid"::"4769}
14532 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
145332023-09-22T23:21:37.751ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 WaitActive WaitActive WaitActive
145342023-09-22T23:21:37.751ZINFOcrucible: [0] downstairs client at 127.0.0.1:63880 has UUID 96640be5-25af-4671-856f-955e28c0fd1e
14535 {"msg"Sep 22 23:21:37.751 INFO [0] Starts reconcile loop
14536 :"Sep 22 23:21:37.751 INFO Connection request from c0eead19-4586-47fd-9018-7a0ce8481009 with version 4, task: proc
14537 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 96640be5-25af-4671-856f-955e28c0fd1e, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.751782501Z","hostname"Sep 22 23:21:37.751 INFO upstairs UpstairsConnection { upstairs_id: c0eead19-4586-47fd-9018-7a0ce8481009, session_id: 64e4856d-2635-4307-a4f9-48bc00b7c13c, gen: 1 } connected, version 4, task: proc
14538 :"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
145392023-09-22T23:21:37.751ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 WaitActive WaitActive WaitActive
14540 {"msg":"[2] downstairs client at 127.0.0.1:54777 has UUID 9ea11016-ca01-4f8d-8100-108f556909ca","v":0,"name":"crucible","level":30Sep 22 23:21:37.751 INFO [1] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
14541 ,"time":"2023-09-22T23:21:37.751865165Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14542 Sep 22 23:21:37.751 INFO [1] Transition from WaitActive to WaitQuorum
14543 {"msg":"Sep 22 23:21:37.751 INFO Connection request from c0eead19-4586-47fd-9018-7a0ce8481009 with version 4, task: proc
14544 [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9ea11016-ca01-4f8d-8100-108f556909ca, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible"Sep 22 23:21:37.751 WARN [1] new RM replaced this: None
14545 ,"level":30Sep 22 23:21:37.751 INFO upstairs UpstairsConnection { upstairs_id: c0eead19-4586-47fd-9018-7a0ce8481009, session_id: 64e4856d-2635-4307-a4f9-48bc00b7c13c, gen: 1 } connected, version 4, task: proc
14546 ,"time":"Sep 22 23:21:37.751 INFO Current flush_numbers [0..12]: [0, 0]
14547 2023-09-22T23:21:37.751923663Z","hostname":"ip-10-150-1-55.us-west-2.compute.internalSep 22 23:21:37.751 INFO [1] Starts reconcile loop
14548 ","pid":4769}
145492023-09-22T23:21:37.751ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 WaitActive WaitActive WaitActive
14550 Sep 22 23:21:37.752 INFO Connection request from c0eead19-4586-47fd-9018-7a0ce8481009 with version 4, task: proc
14551 Sep 22 23:21:37.752 INFO [2] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
14552 Sep 22 23:21:37.752 INFO [2] Transition from WaitActive to WaitQuorum
14553 Sep 22 23:21:37.752 INFO upstairs UpstairsConnection { upstairs_id: c0eead19-4586-47fd-9018-7a0ce8481009, session_id: 64e4856d-2635-4307-a4f9-48bc00b7c13c, gen: 1 } connected, version 4, task: proc
14554 Sep 22 23:21:37.752 WARN [2] new RM replaced this: None
14555 Sep 22 23:21:37.752 INFO [2] Starts reconcile loop
14556 Sep 22 23:21:37.752 INFO Downstairs has completed Negotiation, task: proc
14557 Sep 22 23:21:37.752 INFO [0] 127.0.0.1:45992 task reports connection:true
14558 {"msg":"[0] downstairs client at 127.0.0.1:44443 has UUID 4a9c9bb9-ba03-459d-9045-08a986b4daa9","v":0,"name":"crucible","level":30Sep 22 23:21:37.752 INFO 7743f09f-f465-4fba-892d-457fc58d9abf WaitQuorum WaitQuorum WaitQuorum
14559 ,"time":"2023-09-22T23:21:37.752177989Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14560 {"msg":"Sep 22 23:21:37.752 INFO [0]R flush_numbers: [0, 0]
14561 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4a9c9bb9-ba03-459d-9045-08a986b4daa9, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30Sep 22 23:21:37.752 INFO [0]R generation: [0, 0]
14562 ,"time":"2023-09-22T23:21:37.752232846Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14563 {"msg":"Sep 22 23:21:37.752 INFO [0]R dirty: [false, false]
14564 c8c6d6f5-1396-4842-801b-a86e7e53a2f6 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,Sep 22 23:21:37.752 INFO [1]R flush_numbers: [0, 0]
14565 "time":"2023-09-22T23:21:37.752281139Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14566 Sep 22 23:21:37.752 INFO [1]R generation: [0, 0]
14567 {"msg":"[1] downstairs client at 127.0.0.1:60947 has UUID 2d0b2cb1-76dc-4a6c-9791-1330cfcd6cae","v":0,"name":"crucible","level":Sep 22 23:21:37.752 INFO Current flush_numbers [0..12]: [0, 0]
14568 Sep 22 23:21:37.752 INFO [1]R dirty: [false, false]
14569 30,"time":"Sep 22 23:21:37.752 INFO [2]R flush_numbers: [0, 0]
14570 2023-09-22T23:21:37.752336975Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14571 {"msg"Sep 22 23:21:37.752 INFO [2]R generation: [0, 0]
14572 :"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2d0b2cb1-76dc-4a6c-9791-1330cfcd6cae, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","Sep 22 23:21:37.752 INFO [2]R dirty: [false, false]
14573 level":30,"Sep 22 23:21:37.752 INFO Max found gen is 1
14574 time":"2023-09-22T23:21:37.75239918Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14575 {"msg":"c8c6d6f5-1396-4842-801b-a86e7e53a2f6 WaitActive WaitActive WaitActive"Sep 22 23:21:37.752 INFO Generation requested: 1 >= found:1
14576 ,"v":0,"name":"crucible","level":30Sep 22 23:21:37.752 INFO UUID: 6c77c5f6-1f7d-446d-9f57-ac7866e05802
14577 Sep 22 23:21:37.752 INFO Next flush: 1
14578 ,"time":"2023-09-22T23:21:37.752448709Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.752 INFO Blocks per extent:5 Total Extents: 2
14579 }
14580 {"msg":"[2] downstairs client at 127.0.0.1:51595 has UUID 62fc2f22-4d19-4964-a68a-fe053dd165a7"Sep 22 23:21:37.752 INFO All extents match
14581 ,"v":0,"name":"crucible","level":30Sep 22 23:21:37.752 INFO Downstairs has completed Negotiation, task: proc
14582 Sep 22 23:21:37.752 INFO No downstairs repair required
14583 ,"time":"2023-09-22T23:21:37.752506526Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14584 {"msg":Sep 22 23:21:37.752 INFO No initial repair work was required
14585 "[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 62fc2f22-4d19-4964-a68a-fe053dd165a7, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","Sep 22 23:21:37.752 INFO Crucible Version: Crucible Version: 0.0.1
14586 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14587 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14588 rustc: 1.70.0 stable x86_64-unknown-illumos
14589 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14590 levelSep 22 23:21:37.752 INFO Set Downstairs and Upstairs active
14591 "Sep 22 23:21:37.752 INFO Current flush_numbers [0..12]: [0, 0]
14592 :30Sep 22 23:21:37.752 INFO UpstairsConnection { upstairs_id: c0eead19-4586-47fd-9018-7a0ce8481009, session_id: 64e4856d-2635-4307-a4f9-48bc00b7c13c, gen: 1 } is now active (read-write)
14593 Sep 22 23:21:37.752 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14594 ,"time":"2023-09-22T23:21:37.752578724Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14595 Sep 22 23:21:37.752 INFO current number of open files limit 65536 is already the maximum
14596 {Sep 22 23:21:37.752 INFO Using address: 127.0.0.1:54378, task: main
14597 "Sep 22 23:21:37.752 INFO 7743f09f-f465-4fba-892d-457fc58d9abf is now active with session: 3175f1fa-0abb-4faa-9a83-0df20993aaa9
14598 msg":"c8c6d6f5-1396-4842-801b-a86e7e53a2f6 WaitActive WaitActive WaitActive"Sep 22 23:21:37.752 INFO Opened existing region file "/tmp/downstairs-Ytj1TnsI/region.json"
14599 ,"v":0,"name":"crucible","level":30Sep 22 23:21:37.752 INFO Database read version 1
14600 Sep 22 23:21:37.752 INFO 7743f09f-f465-4fba-892d-457fc58d9abf Set Active after no repair
14601 ,"time":"2023-09-22T23:21:37.752636451Z"Sep 22 23:21:37.752 INFO Database write version 1
14602 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14603 Sep 22 23:21:37.752 INFO Notify all downstairs, region set compare is done.
14604 Sep 22 23:21:37.752 INFO UpstairsConnection { upstairs_id: c0eead19-4586-47fd-9018-7a0ce8481009, session_id: 64e4856d-2635-4307-a4f9-48bc00b7c13c, gen: 1 } is now active (read-write)
14605 Sep 22 23:21:37.752 INFO Set check for repair
14606 Sep 22 23:21:37.752 INFO Current flush_numbers [0..12]: [0, 0]
14607 {"msg":"[0] c0eead19-4586-47fd-9018-7a0ce8481009 (64e4856d-2635-4307-a4f9-48bc00b7c13c) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.752737967Z"Sep 22 23:21:37.752 INFO Downstairs has completed Negotiation, task: proc
14608 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.752 INFO [1] 127.0.0.1:57990 task reports connection:true
14609 }
14610 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30Sep 22 23:21:37.752 INFO 7743f09f-f465-4fba-892d-457fc58d9abf Active Active Active
14611 Sep 22 23:21:37.752 INFO UpstairsConnection { upstairs_id: c0eead19-4586-47fd-9018-7a0ce8481009, session_id: 64e4856d-2635-4307-a4f9-48bc00b7c13c, gen: 1 } is now active (read-write)
14612 ,"time":"2023-09-22T23:21:37.752794371Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14613 Sep 22 23:21:37.752 INFO Set check for repair
146142023-09-22T23:21:37.752ZINFOcrucible: [0] client is_active_req TRUE, promote! session 64e4856d-2635-4307-a4f9-48bc00b7c13c
14615 {"msg":"Sep 22 23:21:37.752 INFO [2] 127.0.0.1:35001 task reports connection:true
14616 [1] c0eead19-4586-47fd-9018-7a0ce8481009 (64e4856d-2635-4307-a4f9-48bc00b7c13c) WaitActive New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.752887563Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":Sep 22 23:21:37.752 INFO 7743f09f-f465-4fba-892d-457fc58d9abf Active Active Active
14617 4769}
14618 {"msg":"Sep 22 23:21:37.752 INFO Downstairs has completed Negotiation, task: proc
14619 [1] Transition from New to WaitActive","v":0,"name":Sep 22 23:21:37.752 INFO Set check for repair
14620 "crucible","level":30Sep 22 23:21:37.752 INFO Repair listens on 127.0.0.1:0, task: repair
14621 Sep 22 23:21:37.752 INFO Current flush_numbers [0..12]: [0, 0]
14622 ,"time":"2023-09-22T23:21:37.752945609Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
146232023-09-22T23:21:37.752ZINFOcrucible: [1] client is_active_req TRUE, promote! session 64e4856d-2635-4307-a4f9-48bc00b7c13c
14624 Sep 22 23:21:37.753 INFO [0] received reconcile message
14625 {"msg":"[2] c0eead19-4586-47fd-9018-7a0ce8481009 (64e4856d-2635-4307-a4f9-48bc00b7c13c) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30Sep 22 23:21:37.753 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55763, task: repair
14626 Sep 22 23:21:37.753 INFO [0] All repairs completed, exit
14627 ,"time":"2023-09-22T23:21:37.753042836Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14628 {"msg":"[2] Transition from New to WaitActive","v":0,"Sep 22 23:21:37.753 INFO [0] Starts cmd_loop
14629 name":"crucible","level":30Sep 22 23:21:37.753 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55763, task: repair
14630 ,"time":"2023-09-22T23:21:37.753093894Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14631 Sep 22 23:21:37.753 INFO Downstairs has completed Negotiation, task: proc
14632 Sep 22 23:21:37.753 INFO [1] received reconcile message
14633 {Sep 22 23:21:37.753 INFO [1] All repairs completed, exit
14634 "msg":"[2] client is_active_req TRUE, promote! session 64e4856d-2635-4307-a4f9-48bc00b7c13c","v":0,"name":"{crucible","level":30"msg":"[1] d1885708-091b-4eda-a722-18751f0474d0 (39e89e28-13fd-421b-bfea-f7360e89a8c3) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:21:37.753 INFO listening, local_addr: 127.0.0.1:55763, task: repair
14635 Sep 22 23:21:37.753 INFO [1] Starts cmd_loop
14636 ,"time":"2023-09-22T23:21:37.753244912Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14637 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.753319305Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","Sep 22 23:21:37.753 INFO [2] received reconcile message
14638 pid":4769}
14639 {"msg":"[1] new RM replaced this: None","v":0Sep 22 23:21:37.753 INFO Current flush_numbers [0..12]: [0, 0]
14640 ,"name":"crucible","level":Sep 22 23:21:37.753 INFO [2] All repairs completed, exit
14641 40,"time":"2023-09-22T23:21:37.753376976Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"Sep 22 23:21:37.753 INFO [2] Starts cmd_loop
14642 :4769}
14643 {"msg":"[1] Starts reconcile loop","v":0,"name":"crucible","level":30Sep 22 23:21:37.753 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55763, task: repair
14644 ,"time":"2023-09-22T23:21:37.753425818Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14645 {"msg":"[0] d1885708-091b-4eda-a722-18751f0474d0 (39e89e28-13fd-421b-bfea-f7360e89a8c3) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:21:37.753 INFO Using repair address: 127.0.0.1:55763, task: main
14646 Sep 22 23:21:37.753 INFO UUID: e173e972-e8cf-416e-bc11-1d8d35f2e9ba
14647 The guest has finished waiting for activation
14648 ,"time":"Sep 22 23:21:37.753 INFO No SSL acceptor configured, task: main
14649 2023-09-22T23:21:37.753472561Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14650 Sep 22 23:21:37.753 INFO Blocks per extent:5 Total Extents: 2
14651 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.753520521Z","hostname":"Sep 22 23:21:37.753 INFO Downstairs has completed Negotiation, task: proc
14652 Sep 22 23:21:37.753 DEBG Write :1004 deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
14653 ip-10-150-1-55.us-west-2.compute.internal","Sep 22 23:21:37.753 INFO current number of open files limit 65536 is already the maximum
14654 pid":4769}
14655 {"msg":"[0] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"Sep 22 23:21:37.753 INFO Crucible Version: Crucible Version: 0.0.1
14656 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14657 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14658 rustc: 1.70.0 stable x86_64-unknown-illumos
14659 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14660 2023-09-22T23:21:37.75357352Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14661 {"msg":"[0] Starts reconcile loop"Sep 22 23:21:37.753 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14662 ,"v":0,"name":"crucible","level":30Sep 22 23:21:37.753 INFO Using address: 127.0.0.1:36272, task: main
14663 ,"time":"2023-09-22T23:21:37.753615534Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14664 {"msg":"[2] d1885708-091b-4eda-a722-18751f0474d0 (39e89e28-13fd-421b-bfea-f7360e89a8c3) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:21:37.753 INFO current number of open files limit 65536 is already the maximum
14665 ,"time":"2023-09-22T23:21:37.753655857Z","Sep 22 23:21:37.753 INFO Created new region file "/tmp/downstairs-P4Ld1XOk/region.json"
14666 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
146672023-09-22T23:21:37.753ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
14668 {"msg":"[2] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:21:37.753742872Z","hostname":"Sep 22 23:21:37.753 INFO Current flush_numbers [0..12]: [0, 0]
14669 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14670 {"msg":"[2] Starts reconcile loop","v":0,"name":"Sep 22 23:21:37.753 INFO Created new region file "/tmp/downstairs-LHGaEZWT/region.json"
14671 crucible","level":30,"time":"2023-09-22T23:21:37.753800141Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
146722023-09-22T23:21:37.753ZINFOcrucible: [1] 127.0.0.1:57484 task reports connection:true
146732023-09-22T23:21:37.753ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 WaitQuorum WaitQuorum WaitQuorum
14674 {"msg":"[0]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30The guest has finished waiting for activation
14675 ,"time":"2023-09-22T23:21:37.753923903Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.753 INFO Repair listens on 127.0.0.1:0, task: repair
14676 }
14677 {"msg":"[0]R generation: [0, 0]","v":0,"name":"crucible","level":30Sep 22 23:21:37.753 INFO Downstairs has completed Negotiation, task: proc
14678 ,"time":"2023-09-22T23:21:37.753968803Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
146792023-09-22T23:21:37.754ZINFOcrucible: [0]R dirty: [false, false]
14680 {"msg":"[1]R flush_numbers: [0, 0]"Sep 22 23:21:37.754 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54512, task: repair
14681 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.754049684Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14682 {"msg":"Sep 22 23:21:37.754 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54512, task: repair
14683 [1]R generation: [0, 0]","v":0,"name":"crucible","level":30Sep 22 23:21:37.754 DEBG Write :1004 deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
14684 ,"time":"2023-09-22T23:21:37.754090455Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14685 {"msg":"[1]R dirty: [false, false]"Sep 22 23:21:37.754 INFO listening, local_addr: 127.0.0.1:54512, task: repair
14686 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.75413138Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14687 {"msg":"[2]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.754166891Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.754 INFO Current flush_numbers [0..12]: [0, 0]
14688 ,"pid":4769}
146892023-09-22T23:21:37.754ZINFOcrucible: [2]R generation: [0, 0]
146902023-09-22T23:21:37.754ZINFOcrucible: [2]R dirty: [false, false]
146912023-09-22T23:21:37.754ZINFOcrucible: Max found gen is 1
14692 {Sep 22 23:21:37.754 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54512, task: repair
14693 "msg":"Generation requested: 1 >= found:1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.754314126Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14694 Sep 22 23:21:37.754 INFO Using repair address: 127.0.0.1:54512, task: main
14695 {"msg":"Next flush: 1","v":0,"name":"crucible","level":30Sep 22 23:21:37.754 INFO No SSL acceptor configured, task: main
14696 ,"time":"2023-09-22T23:21:37.754351886Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14697 {"msg":"All extents match","v":0Sep 22 23:21:37.754 INFO Downstairs has completed Negotiation, task: proc
14698 ,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.75439481Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
146992023-09-22T23:21:37.754ZINFOcrucible: No downstairs repair required
147002023-09-22T23:21:37.754ZINFOcrucible: No initial repair work was required
14701 {"msg":"Set Downstairs and Upstairs active","v":0,"name":"crucible","level":30The guest has finished waiting for activation
14702 ,"time":"Sep 22 23:21:37.754 INFO current number of open files limit 65536 is already the maximum
14703 2023-09-22T23:21:37.754509977Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
147042023-09-22T23:21:37.754ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 is now active with session: 39e89e28-13fd-421b-bfea-f7360e89a8c3
14705 {"msg":"d1885708-091b-4eda-a722-18751f0474d0 Set Active after no repair","v":0,"name":"crucible","level":30Sep 22 23:21:37.754 INFO Current flush_numbers [0..12]: [0, 0]
14706 ,"time":"2023-09-22T23:21:37.754595431Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14707 {"msg":"Notify all downstairs, region set compare is done.","v":0,"name":"crucible","level":30Sep 22 23:21:37.754 INFO Created new region file "/tmp/downstairs-6LMZkwp5/region.json"
14708 ,"time":"2023-09-22T23:21:37.754634111Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14709 {"msg":"Set check for repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.754669035Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal"Sep 22 23:21:37.754 DEBG Write :1004 deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
14710 ,"pid":4769}
147112023-09-22T23:21:37.754ZINFOcrucible: [0] 127.0.0.1:63880 task reports connection:true
147122023-09-22T23:21:37.754ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 Active Active Active
14713 {"msg":"Set check for repair"Sep 22 23:21:37.754 INFO Downstairs has completed Negotiation, task: proc
14714 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.754798315Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
147152023-09-22T23:21:37.754ZINFOcrucible: [2] 127.0.0.1:54777 task reports connection:true
147162023-09-22T23:21:37.754ZINFOcrucible: d1885708-091b-4eda-a722-18751f0474d0 Active Active Active
147172023-09-22T23:21:37.754ZINFOcrucible: Set check for repair
147182023-09-22T23:21:37.754ZINFOcrucible: [0] received reconcile message
147192023-09-22T23:21:37.754ZINFOcrucible: [0] All repairs completed, exit
147202023-09-22T23:21:37.755ZINFOcrucible: [0] Starts cmd_loop
14721 {"msg":"[1] received reconcile message","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:21:37.755041166Z"[0] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 (ddd3f126-3dca-46a7-9147-f6bc461d9852) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","hostname":","v":0ip-10-150-1-55.us-west-2.compute.internal",","name"pid":":crucible"4769,"level"}:
14722 30{"msg":"[1] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:21:37.755071581Ztime"":","hostname2023-09-22T23:21:37.755082981Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal":,"4769pid":4769}}
14723 
14724 {{"msg":""msg":[0] Transition from WaitActive to WaitQuorum"","v"[1] Starts cmd_loop","v":0,"name":"crucible","level":30:0,"name":"crucible","level":30,"time":","2023-09-22T23:21:37.755133007Z"time":","hostname"2023-09-22T23:21:37.755144543Z":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"pid":}4769
14725 }
14726 {"msg"{:"[2] received reconcile message"","msg"v":":0,"[0] new RM replaced this: None"name":","cruciblev"":,"0level",":name":30"crucible","level":40,"time":"2023-09-22T23:21:37.755207189Z",,""time"hostname"::""2023-09-22T23:21:37.755210302Z"ip-10-150-1-55.us-west-2.compute.internal",,""pid":hostname4769":"}
14727 ip-10-150-1-55.us-west-2.compute.internal","pid":4769{}
14728 "msg":"[2] All repairs completed, exit"{,"v":0","msgname""::""crucible","[0] Starts reconcile loop"level":,30"v":0,"name":"crucible","level":30,"time,"":"time":"2023-09-22T23:21:37.753219137Z"2023-09-22T23:21:37.755261866Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,,""pidpid"":"time:47694769":}"}
14729 
14730 {2023-09-22T23:21:37.755267184Z",""hostnamemsg{"":":""[2] Starts cmd_loop"msg":","ip-10-150-1-55.us-west-2.compute.internal"v",:"0pid,[0] downstairs client at 127.0.0.1:35164 has UUID c0634833-e1bc-4dc4-a207-c663e8474e16""name","":v"":crucible0",,""name"level:"":30crucible","level":30:4769}
14731 ,"time":,""time"{:2023-09-22T23:21:37.755308734Z"","2023-09-22T23:21:37.755310934Z"","msg"hostname":":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14732 hostname"[1] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 (ddd3f126-3dca-46a7-9147-f6bc461d9852) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum:""{,"v":0ip-10-150-1-55.us-west-2.compute.internal"",msg,"":pid""":name"4769:"}crucible"
14733 ,"level":30[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c0634833-e1bc-4dc4-a207-c663e8474e16, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.755351007Z",","hostname"time"::""2023-09-22T23:21:37.755357317Z"ip-10-150-1-55.us-west-2.compute.internal",,""hostname"pid:"":4769}ip-10-150-1-55.us-west-2.compute.internal
14734 ","pid":4769{}
14735 "msg":"{[1] Transition from WaitActive to WaitQuorum",""msg":"v":0,"namec0eead19-4586-47fd-9018-7a0ce8481009 WaitActive WaitActive WaitActive"":,""v":0crucible,""name":","crucible"level",":level"30:30,"time":"2023-09-22T23:21:37.755408558Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal":,""pid":4769}
14736 2023-09-22T23:21:37.755408251Z",{"hostname"":"msg":"ip-10-150-1-55.us-west-2.compute.internal","[1] downstairs client at 127.0.0.1:62068 has UUID 9aaa216b-96cd-4a39-8952-f14cf6369014pid"":,"4769v":0,"}name"
14737 :"crucible","level":30{"msg":"[1] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:21:37.75544471Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,}"
14738 time":"2023-09-22T23:21:37.755456041Z"{,"hostname":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14739 {[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9aaa216b-96cd-4a39-8952-f14cf6369014, encrypted: true, database_read_version: 1, database_write_version: 1 }"","msg"v:"":0,"[1] Starts reconcile loopname"":","vcrucible"":0,","level"name:":"30crucible","level":30,"time,"":"time":"2023-09-22T23:21:37.755544244Z"2023-09-22T23:21:37.755546376Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal",ip-10-150-1-55.us-west-2.compute.internal""pid,"":pid4769":4769}
14740 }
14741 {{"msg":""msg":"c0eead19-4586-47fd-9018-7a0ce8481009 WaitActive WaitActive WaitActive","v"[2] c8c6d6f5-1396-4842-801b-a86e7e53a2f6 (ddd3f126-3dca-46a7-9147-f6bc461d9852) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible",":level"0:30,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.755584121Z",","hostname"time:"":"2023-09-22T23:21:37.755587871Zip-10-150-1-55.us-west-2.compute.internal"",","pid":hostname4769":"}
14742 ip-10-150-1-55.us-west-2.compute.internal","pid":4769{}
14743 "msg":"{[2] Transition from WaitActive to WaitQuorum"","msg"v:"":0,"name":"crucible","level"[2] downstairs client at 127.0.0.1:42987 has UUID 69d03179-d586-4376-83f6-52e7dee79d82:"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.755619243Z",",hostname"":"time":"ip-10-150-1-55.us-west-2.compute.internal2023-09-22T23:21:37.755623906Z"",","pid":hostname"4769:"}
14744 {"msg":"[2] new RM replaced this: None","v":0,"name":"crucible","level":40ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14745 {",msg"":time"":"2023-09-22T23:21:37.755718762Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14746 [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 69d03179-d586-4376-83f6-52e7dee79d82, encrypted: true, database_read_version: 1, database_write_version: 1 }","v{":0,""namemsg""::""crucible","[2] Starts reconcile loop"level",:"v30":0,"name":"crucible","level":30The guest has finished waiting for activation
14747 ,","time"time:"":"2023-09-22T23:21:37.755757001Z"2023-09-22T23:21:37.755753565Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:"4769:4769}
14748 }
14749 {"{msg":""msg":"[0] 127.0.0.1:44443 task reports connection:true","v":0,"c0eead19-4586-47fd-9018-7a0ce8481009 WaitActive WaitActive WaitActive"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,"time,"":"time":"2023-09-22T23:21:37.755791299Z"2023-09-22T23:21:37.755793363Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47694769}}
14750 
14751 {{"msg":""msg":"c8c6d6f5-1396-4842-801b-a86e7e53a2f6 WaitQuorum WaitQuorum WaitQuorum","v":0,"name":"crucible[0] c0eead19-4586-47fd-9018-7a0ce8481009 (64e4856d-2635-4307-a4f9-48bc00b7c13c) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"",","levelv""::300,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.755826811Z,""time,"":"hostname":"2023-09-22T23:21:37.75583027Z","ip-10-150-1-55.us-west-2.compute.internal"hostname",:""pid":4769}
14752 ip-10-150-1-55.us-west-2.compute.internal","pid":4769{}
14753 "msg":"{[0]R flush_numbers: [0, 0]"","msg"v:"":0,"name":"[0] Transition from WaitActive to WaitQuorum"crucible",,""v"level:"0:,30"name":"crucible","level":30,"time":","2023-09-22T23:21:37.755860746Ztime"":","hostname":2023-09-22T23:21:37.75586376Z"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769ip-10-150-1-55.us-west-2.compute.internal","}
14754 pid":4769}
14755 {{"msg":""msg":"[0]R generation: [0, 0]","v[0] new RM replaced this: None"":0,","v":name0":,""namecrucible"":","crucible"level",:"30level":40,,""timetime""::""2023-09-22T23:21:37.75589539Z"2023-09-22T23:21:37.755896067Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47694769}}
14756 
14757 {{"msg":""msg":"[0]R dirty: [false, false]","[0] Starts reconcile loop"v":,0",v"":name0":,""namecrucible"":","level":30crucible",",time"":"level":302023-09-22T23:21:37.755929423Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14758 ,{"time":""msg":"2023-09-22T23:21:37.755940961Z","[1]R flush_numbers: [0, 0]"hostname",:""v":0,"name"ip-10-150-1-55.us-west-2.compute.internal:"","cruciblepid"":,4769"level"}:
14759 30{"msg":"[1] c0eead19-4586-47fd-9018-7a0ce8481009 (64e4856d-2635-4307-a4f9-48bc00b7c13c) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum,""time,"":"v":0,"2023-09-22T23:21:37.755963207Zname"":","crucible"hostname",:""level":30ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14760 {"msg":,""time":"[1]R generation: [0, 0]","v2023-09-22T23:21:37.75597915Z"":0,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14761 {,"name"":msg"":"crucible","level":[1] Transition from WaitActive to WaitQuorum"30,"v":0,"name":"crucible","level":30,"time":","time"2023-09-22T23:21:37.756005514Z:"","hostname"2023-09-22T23:21:37.756009979Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4769","}pid
14762 ":4769}
14763 {{"msg":""msg":"[1]R dirty: [false, false]","[1] new RM replaced this: Nonev"":,0",v"":name0":,""namecrucible"":","crucible"level",:"30level":40,,""timetime""::""2023-09-22T23:21:37.756042808Z2023-09-22T23:21:37.756042109Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47694769}}
14764 
14765 {"{msg":""msg":"[1] Starts reconcile loop","[2]R flush_numbers: [0, 0]v"":,0"v,"":0name",:""name"crucible:"","crucible"level,"":30level":30,,""timetime""::""2023-09-22T23:21:37.75608338Z"2023-09-22T23:21:37.756082822Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47694769}}
14766 {"msg":"
14767 [2]R generation: [0, 0]","v":0,"name{":"crucible",""levelmsg""::"30[2] c0eead19-4586-47fd-9018-7a0ce8481009 (64e4856d-2635-4307-a4f9-48bc00b7c13c) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible",",level"":time"30:"2023-09-22T23:21:37.756119501Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14768 ,"time":"{2023-09-22T23:21:37.756131141Z",""msghostname""::""[2]R dirty: [false, false]"ip-10-150-1-55.us-west-2.compute.internal",",pid""v:":47690,}"
14769 name":"crucible"{,"level":"30msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756162154Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal:"","pid":47692023-09-22T23:21:37.756169719Z",}"
14770 hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"":msg4769":"}
14771 Max found gen is 1","v"{:0,""name"msg:"":"crucible","[2] new RM replaced this: Nonelevel"":,"30v":0,"name":"crucible","level":40,"time":"2023-09-22T23:21:37.756197744Z",","time":hostname"":"2023-09-22T23:21:37.75620177Z","ip-10-150-1-55.us-west-2.compute.internal"hostname,"":"pid":4769}
14772 ip-10-150-1-55.us-west-2.compute.internal","pid":4769{}
14773 "msg":"{Generation requested: 1 >= found:1"","msg"v:"":0,"[2] Starts reconcile loopname"":","vcrucible"":0,","level"name:":30"crucible","level":30,"time":","time":"2023-09-22T23:21:37.756231817Z","2023-09-22T23:21:37.756234134Z"hostname":,""hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid"ip-10-150-1-55.us-west-2.compute.internal:"4769,"pid}"
14774 :4769}
14775 {{"msg":""msg"Next flush: 1:"","v":0,"[0] 127.0.0.1:35164 task reports connection:true"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,"time",:""time":"2023-09-22T23:21:37.756265546Z",2023-09-22T23:21:37.756267603Z""hostname,"":"hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internal"pid",:"4769pid":}4769
14776 }
14777 {"{msg":""All extents matchmsg"":","v":0,"name":"c0eead19-4586-47fd-9018-7a0ce8481009 WaitQuorum WaitQuorum WaitQuorumcrucible"",,""vlevel""::030,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756298399Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14778 ,"time":"{2023-09-22T23:21:37.756301636Z"","msg":hostname"":"No downstairs repair required","ip-10-150-1-55.us-west-2.compute.internal"v",:"0pid",:"4769name":"}
14779 crucible","level":30{"msg":"[0]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756332226Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769},
14780 "time":"{2023-09-22T23:21:37.756341204Z"","msg":"hostname":"No initial repair work was required",ip-10-150-1-55.us-west-2.compute.internal""v,"":0pid":,"4769name":}"
14781 crucible","level"{:30"msg":"[0]R generation: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756366343Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid",:"4769time":}"
14782 2023-09-22T23:21:37.756374365Z","hostname{":""msg":"ip-10-150-1-55.us-west-2.compute.internal","pid":Set Downstairs and Upstairs active4769}
14783 {""msg":","v"[0]R dirty: [false, false]:"0,,""v"name:"0:","cruciblename"":","crucible"level",:"30level":30,,""timetime""::""2023-09-22T23:21:37.756405419Z2023-09-22T23:21:37.756405809Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",","pid"pid:":47694769}}
14784 
14785 {"{msg":""msg[1]R flush_numbers: [0, 0]"":","v":0,"name":"crucible","c8c6d6f5-1396-4842-801b-a86e7e53a2f6 is now active with session: ddd3f126-3dca-46a7-9147-f6bc461d9852level"":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756440644Z",,""time"hostname:"":"2023-09-22T23:21:37.75644493Z",ip-10-150-1-55.us-west-2.compute.internal"","hostname"pid:"":4769}
14786 ip-10-150-1-55.us-west-2.compute.internal","pid":4769{}
14787 "msg":"{[1]R generation: [0, 0]",""msgv""::"0,"name":"crucible",c8c6d6f5-1396-4842-801b-a86e7e53a2f6 Set Active after no repair""level,"":v30":0,"name":"crucible","level":30,"time":","2023-09-22T23:21:37.75648007Ztime"":","hostname":"2023-09-22T23:21:37.756483794Z","hostname":ip-10-150-1-55.us-west-2.compute.internal"","pid":4769ip-10-150-1-55.us-west-2.compute.internal"},
14788 "pid":4769{}
14789 "msg":"{[1]R dirty: [false, false]"",msg"":v"":0,"name":"Notify all downstairs, region set compare is done."crucible",,""v"level:"0:,30"name":"crucible","level":30,"time":","time"2023-09-22T23:21:37.756514233Z:"","hostname"2023-09-22T23:21:37.756517258Z:"","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal4769","}pid
14790 ":4769}
14791 {"{msg":""msg":"[2]R flush_numbers: [0, 0]","Set check for repair"v":,0",v"":name0":,""namecrucible"":","cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:21:37.75654908Z2023-09-22T23:21:37.756549943Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pid"pid:":47694769}}
14792 
14793 {{"msg":""msg":"[2]R generation: [0, 0]","v":0[1] 127.0.0.1:60947 task reports connection:true,"",name"":v"":0crucible",","name"level:"":30crucible","level":30,"time":"2023-09-22T23:21:37.756585049Z","hostname":","time"ip-10-150-1-55.us-west-2.compute.internal:"","pid":47692023-09-22T23:21:37.756583394Z"},"
14794 hostname":"{ip-10-150-1-55.us-west-2.compute.internal","pid"":msg"4769:"}
14795 c8c6d6f5-1396-4842-801b-a86e7e53a2f6 Active Active Active","v":0{,"name":""cruciblemsg"":,""level":30[2]R dirty: [false, false]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756619584Z","hostname":,""time":"ip-10-150-1-55.us-west-2.compute.internal",2023-09-22T23:21:37.756624865Z""pid",:"4769hostname":}"
14796 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
14797 {"{msg":"Max found gen is 1""msg,"":"v":0Set check for repair,""name,"":"v":crucible0",","name":level"":crucible30","level":30,"time":","time":"2023-09-22T23:21:37.756656638Z","2023-09-22T23:21:37.756658359Zhostname"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"pid}":
14798 4769}
14799 {"{msg":""msg":"Generation requested: 1 >= found:1","v":0[2] 127.0.0.1:51595 task reports connection:true",",name"":v"":0crucible",","name":level"":crucible30","level":30,","time":time"":"2023-09-22T23:21:37.756690987Z"2023-09-22T23:21:37.756692512Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47694769}}
14800 
14801 {"{msg":""msgNext flush: 1"":","v":0,"name"c8c6d6f5-1396-4842-801b-a86e7e53a2f6 Active Active Active:"","crucible"v",:"0level,"":30name":"crucible","level":30,"time":","time":2023-09-22T23:21:37.756723387Z"","hostname2023-09-22T23:21:37.756726041Z"":","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":ip-10-150-1-55.us-west-2.compute.internal"4769,"}pid
14802 ":4769}
14803 {"{msg":""msgAll extents match"":","v":Set check for repair0",","name"v:"":0crucible,"","name":level"":30crucible","level":30,","time"time:"":"2023-09-22T23:21:37.756756522Z"2023-09-22T23:21:37.756758202Z",","hostname"hostname:"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal",","pid"pid:":47694769}}
14804 
14805 {"{msg":""msg":"No downstairs repair required","v"[0] received reconcile message:"0,,""v"name:"0:","cruciblename"":,""cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:21:37.75679059Z2023-09-22T23:21:37.756789914Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47694769}}
14806 
14807 {"{msg":""msg":"No initial repair work was required","v[0] All repairs completed, exit"":0,","v"name:"0:","cruciblename"":,""cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:21:37.756829892Z2023-09-22T23:21:37.756829245Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47694769}}
14808 {"msg":"
14809 [0] Starts cmd_loop","v":0,"{name":"crucible"","msg"level"::"30Set Downstairs and Upstairs active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.756865757Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal",","time"pid:"":4769}2023-09-22T23:21:37.756872428Z
14810 ","hostname":{""msg"ip-10-150-1-55.us-west-2.compute.internal:"","pid":4769[1] received reconcile message"},
14811 "v":0,"{name":"crucible""msg,"":"level":30c0eead19-4586-47fd-9018-7a0ce8481009 is now active with session: 64e4856d-2635-4307-a4f9-48bc00b7c13c","v":0,"name":"crucible","level":30,,""time"time:"":"2023-09-22T23:21:37.756908208Z"2023-09-22T23:21:37.756900139Z",","hostname":hostname"":"ip-10-150-1-55.us-west-2.compute.internal"ip-10-150-1-55.us-west-2.compute.internal,"","pid"pid:":47694769}}
14812 
14813 {"{msg":""msg":"c0eead19-4586-47fd-9018-7a0ce8481009 Set Active after no repair"[1] All repairs completed, exit",","v"v:"0:0,","name"name:"":"crucible"crucible",","level"level:":3030,,""timetime""::""2023-09-22T23:21:37.756942739Z2023-09-22T23:21:37.756942517Z"",,""hostnamehostname""::""ip-10-150-1-55.us-west-2.compute.internalip-10-150-1-55.us-west-2.compute.internal"",,""pidpid""::47694769}}
14814 
14815 {"{msg":""msg"[1] Starts cmd_loop:"","v":0,"Notify all downstairs, region set compare is done.name"":","cruciblev"":,0","level"name:":30"crucible","level":30,"time":","time":"2023-09-22T23:21:37.756977291Z","2023-09-22T23:21:37.756979553Z"hostname",:""hostname":"ip-10-150-1-55.us-west-2.compute.internal","ip-10-150-1-55.us-west-2.compute.internalpid"":,"4769pid":}4769
14816 }
14817 {"msg":"[2] received reconcile message","v":0,"name":"crucible","{level":30"msg":"Set check for repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.757008937Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"}
14818 time":"{2023-09-22T23:21:37.757017163Z",""hostnamemsg""::""[2] All repairs completed, exit"ip-10-150-1-55.us-west-2.compute.internal",,""v"pid:"0:,4769"name"}:
14819 "crucible","{level":30"msg":"[1] 127.0.0.1:62068 task reports connection:true","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.757043339Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"}time
14820 ":"{2023-09-22T23:21:37.757052129Z",""hostnamemsg""::""[2] Starts cmd_loop",ip-10-150-1-55.us-west-2.compute.internal""v,"":0pid",:"4769name":"}
14821 crucible","level"{:30"msg":"c0eead19-4586-47fd-9018-7a0ce8481009 Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.757076218Z","hostname",":time"":"2023-09-22T23:21:37.757085698Z"ip-10-150-1-55.us-west-2.compute.internal",","hostnamepid""::"4769}
14822 ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
148232023-09-22T23:21:37.757ZINFOcrucible: Set check for repair
148242023-09-22T23:21:37.757ZINFOcrucible: [2] 127.0.0.1:42987 task reports connection:true
148252023-09-22T23:21:37.757ZINFOcrucible: c0eead19-4586-47fd-9018-7a0ce8481009 Active Active Active
148262023-09-22T23:21:37.757ZINFOcrucible: Set check for repair
148272023-09-22T23:21:37.757ZINFOcrucible: [0] received reconcile message
148282023-09-22T23:21:37.757ZINFOcrucible: [0] All repairs completed, exit
148292023-09-22T23:21:37.757ZINFOcrucible: [0] Starts cmd_loop
148302023-09-22T23:21:37.757ZINFOcrucible: [1] received reconcile message
148312023-09-22T23:21:37.757ZINFOcrucible: [1] All repairs completed, exit
148322023-09-22T23:21:37.757ZINFOcrucible: [1] Starts cmd_loop
148332023-09-22T23:21:37.757ZINFOcrucible: [2] received reconcile message
148342023-09-22T23:21:37.757ZINFOcrucible: [2] All repairs completed, exit
148352023-09-22T23:21:37.757ZINFOcrucible: [2] Starts cmd_loop
14836 Sep 22 23:21:37.758 INFO current number of open files limit 65536 is already the maximum
14837 Sep 22 23:21:37.758 INFO Opened existing region file "/tmp/downstairs-LHGaEZWT/region.json"
14838 Sep 22 23:21:37.758 INFO Database read version 1
14839 Sep 22 23:21:37.758 INFO Database write version 1
14840 test test::integration_test_io_out_of_range ... ok
14841 Sep 22 23:21:37.758 DEBG Read :1005 deps:[JobId(1004), JobId(1002), JobId(1000)] res:true
14842 Sep 22 23:21:37.759 INFO current number of open files limit 65536 is already the maximum
14843 Sep 22 23:21:37.759 INFO Created new region file "/tmp/downstairs-Was11KKc/region.json"
14844 test test::integration_test_guest_zero_length_io ... ok
14845 Sep 22 23:21:37.759 INFO current number of open files limit 65536 is already the maximum
14846 Sep 22 23:21:37.759 DEBG Read :1005 deps:[JobId(1004), JobId(1002), JobId(1000)] res:true
14847 Sep 22 23:21:37.759 INFO Created new region file "/tmp/downstairs-0VvjfZUg/region.json"
14848 test test::integration_test_io_span_out_of_range ... ok
14849 Sep 22 23:21:37.760 INFO UUID: 838f9715-247a-451d-9f6e-d47e6fb7eb32
14850 Sep 22 23:21:37.760 INFO Blocks per extent:5 Total Extents: 2
14851 Sep 22 23:21:37.760 INFO Crucible Version: Crucible Version: 0.0.1
14852 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14853 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14854 rustc: 1.70.0 stable x86_64-unknown-illumos
14855 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14856 Sep 22 23:21:37.760 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14857 Sep 22 23:21:37.760 INFO Using address: 127.0.0.1:60919, task: main
14858 Sep 22 23:21:37.760 DEBG Read :1005 deps:[JobId(1004), JobId(1002), JobId(1000)] res:true
14859 Sep 22 23:21:37.760 INFO current number of open files limit 65536 is already the maximum
14860 Sep 22 23:21:37.760 INFO current number of open files limit 65536 is already the maximum
14861 Sep 22 23:21:37.760 INFO Created new region file "/tmp/downstairs-iykT2e0l/region.json"
14862 Sep 22 23:21:37.760 INFO Opened existing region file "/tmp/downstairs-P4Ld1XOk/region.json"
14863 Sep 22 23:21:37.760 INFO Repair listens on 127.0.0.1:0, task: repair
14864 Sep 22 23:21:37.760 INFO Database read version 1
14865 Sep 22 23:21:37.760 INFO Database write version 1
14866 Sep 22 23:21:37.760 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47350, task: repair
14867 Sep 22 23:21:37.760 INFO current number of open files limit 65536 is already the maximum
14868 Sep 22 23:21:37.760 INFO Opened existing region file "/tmp/downstairs-6LMZkwp5/region.json"
14869 Sep 22 23:21:37.760 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47350, task: repair
14870 Sep 22 23:21:37.760 INFO Database read version 1
14871 Sep 22 23:21:37.760 INFO Database write version 1
14872 Sep 22 23:21:37.760 INFO listening, local_addr: 127.0.0.1:47350, task: repair
14873 Sep 22 23:21:37.761 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47350, task: repair
14874 Sep 22 23:21:37.761 INFO Using repair address: 127.0.0.1:47350, task: main
14875 Sep 22 23:21:37.761 INFO No SSL acceptor configured, task: main
14876 Sep 22 23:21:37.762 INFO Upstairs starts
14877 Sep 22 23:21:37.762 INFO Crucible Version: BuildInfo {
14878 version: "0.0.1",
14879 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
14880 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
14881 git_branch: "main",
14882 rustc_semver: "1.70.0",
14883 rustc_channel: "stable",
14884 rustc_host_triple: "x86_64-unknown-illumos",
14885 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
14886 cargo_triple: "x86_64-unknown-illumos",
14887 debug: true,
14888 opt_level: 0,
14889 }
14890 Sep 22 23:21:37.762 INFO Upstairs <-> Downstairs Message Version: 4
14891 Sep 22 23:21:37.762 INFO Crucible stats registered with UUID: 6afc946f-d080-46eb-8409-d4d69f1e5cf5
14892 Sep 22 23:21:37.762 INFO Crucible 6afc946f-d080-46eb-8409-d4d69f1e5cf5 has session id: c2a95475-2918-450f-b16e-7b810ca0119f
14893 Sep 22 23:21:37.762 INFO listening on 127.0.0.1:0, task: main
14894 Sep 22 23:21:37.762 INFO listening on 127.0.0.1:0, task: main
14895 Sep 22 23:21:37.762 INFO listening on 127.0.0.1:0, task: main
14896 Sep 22 23:21:37.762 INFO [0] connecting to 127.0.0.1:35178, looper: 0
14897 Sep 22 23:21:37.762 INFO [1] connecting to 127.0.0.1:54378, looper: 1
14898 Sep 22 23:21:37.762 INFO UUID: d57f7bca-56d6-4fef-ba3b-7c8c7e1674fe
14899 Sep 22 23:21:37.762 INFO Blocks per extent:5 Total Extents: 2
14900 Sep 22 23:21:37.762 INFO UUID: c675aa7b-d28f-41ce-bbbd-77c754910d5f
14901 Sep 22 23:21:37.762 INFO Blocks per extent:5 Total Extents: 2
14902 Sep 22 23:21:37.762 INFO [2] connecting to 127.0.0.1:60919, looper: 2
14903 Sep 22 23:21:37.762 INFO Crucible Version: Crucible Version: 0.0.1
14904 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14905 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14906 rustc: 1.70.0 stable x86_64-unknown-illumos
14907 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14908 Sep 22 23:21:37.762 INFO Crucible Version: Crucible Version: 0.0.1
14909 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14910 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14911 rustc: 1.70.0 stable x86_64-unknown-illumos
14912 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14913 Sep 22 23:21:37.762 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14914 Sep 22 23:21:37.762 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14915 Sep 22 23:21:37.762 INFO Using address: 127.0.0.1:46846, task: main
14916 Sep 22 23:21:37.762 INFO Using address: 127.0.0.1:48840, task: main
14917 Sep 22 23:21:37.763 INFO up_listen starts, task: up_listen
14918 Sep 22 23:21:37.763 INFO Wait for all three downstairs to come online
14919 Sep 22 23:21:37.763 INFO Flush timeout: 0.5
14920 Sep 22 23:21:37.763 INFO Repair listens on 127.0.0.1:0, task: repair
14921 Sep 22 23:21:37.763 INFO accepted connection from 127.0.0.1:43367, task: main
14922 Sep 22 23:21:37.763 INFO Repair listens on 127.0.0.1:0, task: repair
14923 Sep 22 23:21:37.763 INFO [0] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 looper connected, looper: 0
14924 Sep 22 23:21:37.763 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48561, task: repair
14925 Sep 22 23:21:37.763 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38246, task: repair
14926 Sep 22 23:21:37.763 INFO [0] Proc runs for 127.0.0.1:35178 in state New
14927 Sep 22 23:21:37.763 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48561, task: repair
14928 Sep 22 23:21:37.763 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38246, task: repair
14929 Sep 22 23:21:37.763 INFO listening, local_addr: 127.0.0.1:48561, task: repair
14930 Sep 22 23:21:37.763 INFO listening, local_addr: 127.0.0.1:38246, task: repair
14931 Sep 22 23:21:37.763 INFO [1] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 looper connected, looper: 1
14932 Sep 22 23:21:37.763 INFO [1] Proc runs for 127.0.0.1:54378 in state New
14933 Sep 22 23:21:37.763 INFO current number of open files limit 65536 is already the maximum
14934 Sep 22 23:21:37.763 INFO [2] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 looper connected, looper: 2
14935 Sep 22 23:21:37.763 INFO Opened existing region file "/tmp/downstairs-0VvjfZUg/region.json"
14936 Sep 22 23:21:37.763 INFO Database read version 1
14937 Sep 22 23:21:37.763 INFO Database write version 1
14938 Sep 22 23:21:37.763 INFO [2] Proc runs for 127.0.0.1:60919 in state New
14939 Sep 22 23:21:37.763 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48561, task: repair
14940 Sep 22 23:21:37.763 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38246, task: repair
14941 Sep 22 23:21:37.763 INFO Using repair address: 127.0.0.1:38246, task: main
14942 Sep 22 23:21:37.763 INFO Using repair address: 127.0.0.1:48561, task: main
14943 Sep 22 23:21:37.763 INFO No SSL acceptor configured, task: main
14944 Sep 22 23:21:37.763 INFO No SSL acceptor configured, task: main
14945 Sep 22 23:21:37.763 INFO accepted connection from 127.0.0.1:58083, task: main
14946 Sep 22 23:21:37.763 INFO accepted connection from 127.0.0.1:39236, task: main
14947 Sep 22 23:21:37.764 INFO Connection request from 6afc946f-d080-46eb-8409-d4d69f1e5cf5 with version 4, task: proc
14948 Sep 22 23:21:37.764 INFO current number of open files limit 65536 is already the maximum
14949 Sep 22 23:21:37.764 INFO upstairs UpstairsConnection { upstairs_id: 6afc946f-d080-46eb-8409-d4d69f1e5cf5, session_id: a5cb2416-c55e-442a-875e-b2f1c82b7925, gen: 1 } connected, version 4, task: proc
14950 Sep 22 23:21:37.764 INFO Connection request from 6afc946f-d080-46eb-8409-d4d69f1e5cf5 with version 4, task: proc
14951 Sep 22 23:21:37.764 INFO Created new region file "/tmp/downstairs-kyT9vuNL/region.json"
14952 Sep 22 23:21:37.764 INFO Upstairs starts
14953 Sep 22 23:21:37.764 INFO upstairs UpstairsConnection { upstairs_id: 6afc946f-d080-46eb-8409-d4d69f1e5cf5, session_id: a5cb2416-c55e-442a-875e-b2f1c82b7925, gen: 1 } connected, version 4, task: proc
14954 Sep 22 23:21:37.764 INFO Crucible Version: BuildInfo {
14955 version: "0.0.1",
14956 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
14957 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
14958 git_branch: "main",
14959 rustc_semver: "1.70.0",
14960 rustc_channel: "stable",
14961 rustc_host_triple: "x86_64-unknown-illumos",
14962 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
14963 cargo_triple: "x86_64-unknown-illumos",
14964 debug: true,
14965 opt_level: 0,
14966 }
14967 Sep 22 23:21:37.764 INFO Upstairs <-> Downstairs Message Version: 4
14968 Sep 22 23:21:37.764 INFO Crucible stats registered with UUID: be1bbe57-a78b-41fd-91a6-742ef82c740e
14969 Sep 22 23:21:37.764 INFO Connection request from 6afc946f-d080-46eb-8409-d4d69f1e5cf5 with version 4, task: proc
14970 Sep 22 23:21:37.764 INFO Crucible be1bbe57-a78b-41fd-91a6-742ef82c740e has session id: 777b8bd2-886b-4a1b-bd27-823aabe06e49
14971 Sep 22 23:21:37.764 INFO upstairs UpstairsConnection { upstairs_id: 6afc946f-d080-46eb-8409-d4d69f1e5cf5, session_id: a5cb2416-c55e-442a-875e-b2f1c82b7925, gen: 1 } connected, version 4, task: proc
14972 Sep 22 23:21:37.764 INFO listening on 127.0.0.1:0, task: main
14973 Sep 22 23:21:37.764 INFO current number of open files limit 65536 is already the maximum
14974 Sep 22 23:21:37.764 INFO Opened existing region file "/tmp/downstairs-iykT2e0l/region.json"
14975 Sep 22 23:21:37.764 INFO Database read version 1
14976 Sep 22 23:21:37.764 INFO listening on 127.0.0.1:0, task: main
14977 Sep 22 23:21:37.764 INFO Database write version 1
14978 Sep 22 23:21:37.764 INFO [0] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 (a5cb2416-c55e-442a-875e-b2f1c82b7925) New New New ds_transition to WaitActive
14979 Sep 22 23:21:37.764 INFO [0] Transition from New to WaitActive
14980 Sep 22 23:21:37.764 INFO listening on 127.0.0.1:0, task: main
14981 Sep 22 23:21:37.764 INFO [0] connecting to 127.0.0.1:61806, looper: 0
14982 Sep 22 23:21:37.764 INFO UUID: 948c8c95-e366-4ab4-b43c-f85926d90cc9
14983 Sep 22 23:21:37.764 INFO [1] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 (a5cb2416-c55e-442a-875e-b2f1c82b7925) WaitActive New New ds_transition to WaitActive
14984 Sep 22 23:21:37.764 INFO Blocks per extent:5 Total Extents: 2
14985 Sep 22 23:21:37.764 INFO [1] Transition from New to WaitActive
14986 Sep 22 23:21:37.764 INFO Crucible Version: Crucible Version: 0.0.1
14987 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14988 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14989 rustc: 1.70.0 stable x86_64-unknown-illumos
14990 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14991 Sep 22 23:21:37.764 INFO [2] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 (a5cb2416-c55e-442a-875e-b2f1c82b7925) WaitActive WaitActive New ds_transition to WaitActive
14992 Sep 22 23:21:37.764 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14993 Sep 22 23:21:37.764 INFO Using address: 127.0.0.1:63732, task: main
14994 Sep 22 23:21:37.764 INFO [1] connecting to 127.0.0.1:36272, looper: 1
14995 Sep 22 23:21:37.764 INFO [2] Transition from New to WaitActive
14996 Sep 22 23:21:37.765 INFO [2] connecting to 127.0.0.1:46846, looper: 2
14997 The guest has requested activation
14998 Sep 22 23:21:37.765 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 active request set
14999 Sep 22 23:21:37.765 INFO up_listen starts, task: up_listen
15000 Sep 22 23:21:37.765 INFO Wait for all three downstairs to come online
15001 Sep 22 23:21:37.765 INFO Repair listens on 127.0.0.1:0, task: repair
15002 Sep 22 23:21:37.765 INFO Flush timeout: 0.5
15003 Sep 22 23:21:37.765 INFO [0] received activate with gen 1
15004 Sep 22 23:21:37.765 INFO [0] client got ds_active_rx, promote! session a5cb2416-c55e-442a-875e-b2f1c82b7925
15005 Sep 22 23:21:37.765 INFO [1] received activate with gen 1
15006 Sep 22 23:21:37.765 INFO [1] client got ds_active_rx, promote! session a5cb2416-c55e-442a-875e-b2f1c82b7925
15007 Sep 22 23:21:37.765 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52997, task: repair
15008 Sep 22 23:21:37.765 INFO [2] received activate with gen 1
15009 Sep 22 23:21:37.765 INFO [2] client got ds_active_rx, promote! session a5cb2416-c55e-442a-875e-b2f1c82b7925
15010 Sep 22 23:21:37.765 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52997, task: repair
15011 Sep 22 23:21:37.765 INFO listening, local_addr: 127.0.0.1:52997, task: repair
15012 Sep 22 23:21:37.765 INFO accepted connection from 127.0.0.1:50466, task: main
15013 Sep 22 23:21:37.765 INFO accepted connection from 127.0.0.1:41992, task: main
15014 Sep 22 23:21:37.765 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52997, task: repair
15015 Sep 22 23:21:37.765 INFO accepted connection from 127.0.0.1:59307, task: main
15016 Sep 22 23:21:37.765 INFO Using repair address: 127.0.0.1:52997, task: main
15017 Sep 22 23:21:37.765 INFO No SSL acceptor configured, task: main
15018 Sep 22 23:21:37.765 INFO [0] be1bbe57-a78b-41fd-91a6-742ef82c740e looper connected, looper: 0
15019 Sep 22 23:21:37.765 INFO [0] Proc runs for 127.0.0.1:61806 in state New
15020 Sep 22 23:21:37.765 INFO UUID: a6815f0f-5f43-4904-a056-e79c5267f195
15021 Sep 22 23:21:37.765 INFO [1] be1bbe57-a78b-41fd-91a6-742ef82c740e looper connected, looper: 1
15022 Sep 22 23:21:37.765 INFO Blocks per extent:5 Total Extents: 2
15023 Sep 22 23:21:37.765 INFO [1] Proc runs for 127.0.0.1:36272 in state New
15024 Sep 22 23:21:37.765 INFO [0] downstairs client at 127.0.0.1:35178 has UUID a32c7e82-e087-4524-b977-a5b003758c5f
15025 Sep 22 23:21:37.765 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a32c7e82-e087-4524-b977-a5b003758c5f, encrypted: true, database_read_version: 1, database_write_version: 1 }
15026 Sep 22 23:21:37.765 INFO Crucible Version: Crucible Version: 0.0.1
15027 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15028 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15029 rustc: 1.70.0 stable x86_64-unknown-illumos
15030 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15031 Sep 22 23:21:37.765 INFO [2] be1bbe57-a78b-41fd-91a6-742ef82c740e looper connected, looper: 2
15032 Sep 22 23:21:37.765 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 WaitActive WaitActive WaitActive
15033 Sep 22 23:21:37.765 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15034 Sep 22 23:21:37.766 INFO Using address: 127.0.0.1:60322, task: main
15035 Sep 22 23:21:37.766 INFO [2] Proc runs for 127.0.0.1:46846 in state New
15036 Sep 22 23:21:37.766 INFO current number of open files limit 65536 is already the maximum
15037 Sep 22 23:21:37.766 INFO [1] downstairs client at 127.0.0.1:54378 has UUID 6c77c5f6-1f7d-446d-9f57-ac7866e05802
15038 Sep 22 23:21:37.766 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6c77c5f6-1f7d-446d-9f57-ac7866e05802, encrypted: true, database_read_version: 1, database_write_version: 1 }
15039 Sep 22 23:21:37.766 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 WaitActive WaitActive WaitActive
15040 Sep 22 23:21:37.766 INFO Created new region file "/tmp/downstairs-2YrrmjVu/region.json"
15041 Sep 22 23:21:37.766 INFO [2] downstairs client at 127.0.0.1:60919 has UUID 838f9715-247a-451d-9f6e-d47e6fb7eb32
15042 Sep 22 23:21:37.766 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 838f9715-247a-451d-9f6e-d47e6fb7eb32, encrypted: true, database_read_version: 1, database_write_version: 1 }
15043 Sep 22 23:21:37.766 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 WaitActive WaitActive WaitActive
15044 Sep 22 23:21:37.766 INFO Repair listens on 127.0.0.1:0, task: repair
15045 Sep 22 23:21:37.766 INFO Connection request from be1bbe57-a78b-41fd-91a6-742ef82c740e with version 4, task: proc
15046 Sep 22 23:21:37.766 INFO upstairs UpstairsConnection { upstairs_id: be1bbe57-a78b-41fd-91a6-742ef82c740e, session_id: dfde9f11-1418-46ce-85ab-4ed932ebdb6b, gen: 1 } connected, version 4, task: proc
15047 Sep 22 23:21:37.766 INFO Current flush_numbers [0..12]: [0, 0]
15048 Sep 22 23:21:37.766 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46041, task: repair
15049 Sep 22 23:21:37.766 INFO Connection request from be1bbe57-a78b-41fd-91a6-742ef82c740e with version 4, task: proc
15050 Sep 22 23:21:37.766 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46041, task: repair
15051 Sep 22 23:21:37.766 INFO upstairs UpstairsConnection { upstairs_id: be1bbe57-a78b-41fd-91a6-742ef82c740e, session_id: dfde9f11-1418-46ce-85ab-4ed932ebdb6b, gen: 1 } connected, version 4, task: proc
15052 Sep 22 23:21:37.766 INFO Downstairs has completed Negotiation, task: proc
15053 Sep 22 23:21:37.766 INFO listening, local_addr: 127.0.0.1:46041, task: repair
15054 Sep 22 23:21:37.766 INFO Connection request from be1bbe57-a78b-41fd-91a6-742ef82c740e with version 4, task: proc
15055 Sep 22 23:21:37.766 INFO upstairs UpstairsConnection { upstairs_id: be1bbe57-a78b-41fd-91a6-742ef82c740e, session_id: dfde9f11-1418-46ce-85ab-4ed932ebdb6b, gen: 1 } connected, version 4, task: proc
15056 Sep 22 23:21:37.766 INFO Current flush_numbers [0..12]: [0, 0]
15057 Sep 22 23:21:37.766 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46041, task: repair
15058 Sep 22 23:21:37.766 INFO Using repair address: 127.0.0.1:46041, task: main
15059 Sep 22 23:21:37.766 INFO No SSL acceptor configured, task: main
15060 Sep 22 23:21:37.766 INFO Downstairs has completed Negotiation, task: proc
15061 Sep 22 23:21:37.766 INFO [0] be1bbe57-a78b-41fd-91a6-742ef82c740e (dfde9f11-1418-46ce-85ab-4ed932ebdb6b) New New New ds_transition to WaitActive
15062 Sep 22 23:21:37.766 INFO [0] Transition from New to WaitActive
15063 Sep 22 23:21:37.766 INFO Current flush_numbers [0..12]: [0, 0]
15064 Sep 22 23:21:37.766 INFO current number of open files limit 65536 is already the maximum
15065 Sep 22 23:21:37.767 INFO [1] be1bbe57-a78b-41fd-91a6-742ef82c740e (dfde9f11-1418-46ce-85ab-4ed932ebdb6b) WaitActive New New ds_transition to WaitActive
15066 Sep 22 23:21:37.767 INFO [1] Transition from New to WaitActive
15067 Sep 22 23:21:37.767 INFO Downstairs has completed Negotiation, task: proc
15068 Sep 22 23:21:37.767 INFO Created new region file "/tmp/downstairs-95N00kmj/region.json"
15069 Sep 22 23:21:37.767 INFO [2] be1bbe57-a78b-41fd-91a6-742ef82c740e (dfde9f11-1418-46ce-85ab-4ed932ebdb6b) WaitActive WaitActive New ds_transition to WaitActive
15070 Sep 22 23:21:37.767 INFO current number of open files limit 65536 is already the maximum
15071 Sep 22 23:21:37.767 INFO [2] Transition from New to WaitActive
15072 Sep 22 23:21:37.767 INFO Opened existing region file "/tmp/downstairs-kyT9vuNL/region.json"
15073 Sep 22 23:21:37.767 INFO Database read version 1
15074 Sep 22 23:21:37.767 INFO [0] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 (a5cb2416-c55e-442a-875e-b2f1c82b7925) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15075 Sep 22 23:21:37.767 INFO Database write version 1
15076 Sep 22 23:21:37.767 INFO [0] Transition from WaitActive to WaitQuorum
15077 Sep 22 23:21:37.767 WARN [0] new RM replaced this: None
15078 Sep 22 23:21:37.767 INFO [0] Starts reconcile loop
15079 The guest has requested activation
15080 Sep 22 23:21:37.767 INFO [1] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 (a5cb2416-c55e-442a-875e-b2f1c82b7925) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15081 Sep 22 23:21:37.767 INFO [1] Transition from WaitActive to WaitQuorum
15082 Sep 22 23:21:37.767 WARN [1] new RM replaced this: None
15083 Sep 22 23:21:37.767 INFO [1] Starts reconcile loop
15084 Sep 22 23:21:37.767 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e active request set
15085 Sep 22 23:21:37.767 INFO [2] 6afc946f-d080-46eb-8409-d4d69f1e5cf5 (a5cb2416-c55e-442a-875e-b2f1c82b7925) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15086 Sep 22 23:21:37.767 INFO [2] Transition from WaitActive to WaitQuorum
15087 Sep 22 23:21:37.767 WARN [2] new RM replaced this: None
15088 Sep 22 23:21:37.767 INFO [2] Starts reconcile loop
15089 Sep 22 23:21:37.767 INFO [0] received activate with gen 1
15090 Sep 22 23:21:37.767 INFO [0] client got ds_active_rx, promote! session dfde9f11-1418-46ce-85ab-4ed932ebdb6b
15091 Sep 22 23:21:37.767 INFO [0] 127.0.0.1:35178 task reports connection:true
15092 Sep 22 23:21:37.767 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 WaitQuorum WaitQuorum WaitQuorum
15093 Sep 22 23:21:37.767 INFO [0]R flush_numbers: [0, 0]
15094 Sep 22 23:21:37.767 INFO [0]R generation: [0, 0]
15095 Sep 22 23:21:37.767 INFO [0]R dirty: [false, false]
15096 Sep 22 23:21:37.767 INFO [1] received activate with gen 1
15097 Sep 22 23:21:37.767 INFO [1]R flush_numbers: [0, 0]
15098 Sep 22 23:21:37.767 INFO [1]R generation: [0, 0]
15099 Sep 22 23:21:37.767 INFO [1] client got ds_active_rx, promote! session dfde9f11-1418-46ce-85ab-4ed932ebdb6b
15100 Sep 22 23:21:37.767 INFO [1]R dirty: [false, false]
15101 Sep 22 23:21:37.767 INFO [2]R flush_numbers: [0, 0]
15102 Sep 22 23:21:37.767 INFO [2]R generation: [0, 0]
15103 Sep 22 23:21:37.767 INFO [2]R dirty: [false, false]
15104 Sep 22 23:21:37.767 INFO Max found gen is 1
15105 Sep 22 23:21:37.767 INFO Generation requested: 1 >= found:1
15106 Sep 22 23:21:37.767 INFO Next flush: 1
15107 Sep 22 23:21:37.767 INFO [2] received activate with gen 1
15108 Sep 22 23:21:37.767 INFO All extents match
15109 Sep 22 23:21:37.767 INFO No downstairs repair required
15110 Sep 22 23:21:37.767 INFO [2] client got ds_active_rx, promote! session dfde9f11-1418-46ce-85ab-4ed932ebdb6b
15111 Sep 22 23:21:37.767 INFO No initial repair work was required
15112 Sep 22 23:21:37.767 INFO Set Downstairs and Upstairs active
15113 Sep 22 23:21:37.767 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 is now active with session: a5cb2416-c55e-442a-875e-b2f1c82b7925
15114 Sep 22 23:21:37.767 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 Set Active after no repair
15115 Sep 22 23:21:37.767 INFO Notify all downstairs, region set compare is done.
15116 Sep 22 23:21:37.767 INFO Set check for repair
15117 Sep 22 23:21:37.767 INFO [1] 127.0.0.1:54378 task reports connection:true
15118 Sep 22 23:21:37.767 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 Active Active Active
15119 Sep 22 23:21:37.767 INFO Set check for repair
15120 Sep 22 23:21:37.767 INFO [2] 127.0.0.1:60919 task reports connection:true
15121 Sep 22 23:21:37.767 INFO 6afc946f-d080-46eb-8409-d4d69f1e5cf5 Active Active Active
15122 Sep 22 23:21:37.767 INFO Set check for repair
15123 Sep 22 23:21:37.767 INFO [0] received reconcile message
15124 Sep 22 23:21:37.767 INFO [0] All repairs completed, exit
15125 Sep 22 23:21:37.767 INFO [0] Starts cmd_loop
15126 Sep 22 23:21:37.768 INFO [1] received reconcile message
15127 Sep 22 23:21:37.768 INFO [1] All repairs completed, exit
15128 Sep 22 23:21:37.768 INFO [1] Starts cmd_loop
15129 Sep 22 23:21:37.768 INFO [2] received reconcile message
15130 Sep 22 23:21:37.768 INFO [2] All repairs completed, exit
15131 Sep 22 23:21:37.768 INFO [2] Starts cmd_loop
15132 The guest has finished waiting for activation
15133 Sep 22 23:21:37.768 INFO [0] downstairs client at 127.0.0.1:61806 has UUID 17366ba3-9d52-4dce-a4fd-d4657621aec9
15134 Sep 22 23:21:37.768 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 17366ba3-9d52-4dce-a4fd-d4657621aec9, encrypted: true, database_read_version: 1, database_write_version: 1 }
15135 Sep 22 23:21:37.768 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e WaitActive WaitActive WaitActive
15136 Sep 22 23:21:37.768 DEBG IO Read 1000 has deps []
15137 Sep 22 23:21:37.768 INFO [1] downstairs client at 127.0.0.1:36272 has UUID e173e972-e8cf-416e-bc11-1d8d35f2e9ba
15138 Sep 22 23:21:37.768 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e173e972-e8cf-416e-bc11-1d8d35f2e9ba, encrypted: true, database_read_version: 1, database_write_version: 1 }
15139 Sep 22 23:21:37.768 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e WaitActive WaitActive WaitActive
15140 Sep 22 23:21:37.768 INFO [2] downstairs client at 127.0.0.1:46846 has UUID d57f7bca-56d6-4fef-ba3b-7c8c7e1674fe
15141 Sep 22 23:21:37.768 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d57f7bca-56d6-4fef-ba3b-7c8c7e1674fe, encrypted: true, database_read_version: 1, database_write_version: 1 }
15142 Sep 22 23:21:37.768 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e WaitActive WaitActive WaitActive
15143 Sep 22 23:21:37.768 INFO Current flush_numbers [0..12]: [0, 0]
15144 Sep 22 23:21:37.768 DEBG Read :1000 deps:[] res:true
15145 Sep 22 23:21:37.769 INFO Downstairs has completed Negotiation, task: proc
15146 Sep 22 23:21:37.769 DEBG Read :1000 deps:[] res:true
15147 Sep 22 23:21:37.769 DEBG Read :1000 deps:[] res:true
15148 Sep 22 23:21:37.769 INFO Current flush_numbers [0..12]: [0, 0]
15149 Sep 22 23:21:37.769 INFO UUID: 59952ac1-1e6e-4726-b89d-5cdada889db9
15150 Sep 22 23:21:37.769 INFO Blocks per extent:5 Total Extents: 2
15151 Sep 22 23:21:37.769 INFO current number of open files limit 65536 is already the maximum
15152 Sep 22 23:21:37.769 INFO Opened existing region file "/tmp/downstairs-2YrrmjVu/region.json"
15153 Sep 22 23:21:37.769 INFO Database read version 1
15154 Sep 22 23:21:37.769 INFO Database write version 1
15155 Sep 22 23:21:37.769 INFO Crucible Version: Crucible Version: 0.0.1
15156 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15157 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15158 rustc: 1.70.0 stable x86_64-unknown-illumos
15159 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15160 Sep 22 23:21:37.769 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15161 Sep 22 23:21:37.769 INFO Using address: 127.0.0.1:58780, task: main
15162 Sep 22 23:21:37.769 INFO Downstairs has completed Negotiation, task: proc
15163 Sep 22 23:21:37.769 DEBG [0] Read AckReady 1000, : downstairs
15164 Sep 22 23:21:37.769 INFO Current flush_numbers [0..12]: [0, 0]
15165 Sep 22 23:21:37.769 DEBG [1] Read already AckReady 1000, : downstairs
15166 Sep 22 23:21:37.770 DEBG [2] Read already AckReady 1000, : downstairs
15167 Sep 22 23:21:37.770 INFO Repair listens on 127.0.0.1:0, task: repair
15168 Sep 22 23:21:37.770 DEBG up_ds_listen was notified
15169 Sep 22 23:21:37.770 INFO Downstairs has completed Negotiation, task: proc
15170 Sep 22 23:21:37.770 DEBG up_ds_listen process 1000
15171 Sep 22 23:21:37.770 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33158, task: repair
15172 Sep 22 23:21:37.770 DEBG [A] ack job 1000:1, : downstairs
15173 Sep 22 23:21:37.770 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33158, task: repair
15174 Sep 22 23:21:37.770 INFO listening, local_addr: 127.0.0.1:33158, task: repair
15175 Sep 22 23:21:37.770 DEBG up_ds_listen checked 1 jobs, back to waiting
15176 Sep 22 23:21:37.770 INFO [0] be1bbe57-a78b-41fd-91a6-742ef82c740e (dfde9f11-1418-46ce-85ab-4ed932ebdb6b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15177 Sep 22 23:21:37.770 INFO [0] Transition from WaitActive to WaitQuorum
15178 Sep 22 23:21:37.770 WARN [0] new RM replaced this: None
15179 Sep 22 23:21:37.770 INFO [0] Starts reconcile loop
15180 Sep 22 23:21:37.770 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33158, task: repair
15181 Sep 22 23:21:37.770 INFO Using repair address: 127.0.0.1:33158, task: main
15182 Sep 22 23:21:37.770 INFO No SSL acceptor configured, task: main
15183 Sep 22 23:21:37.770 INFO [1] be1bbe57-a78b-41fd-91a6-742ef82c740e (dfde9f11-1418-46ce-85ab-4ed932ebdb6b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15184 Sep 22 23:21:37.770 INFO [1] Transition from WaitActive to WaitQuorum
15185 Sep 22 23:21:37.770 WARN [1] new RM replaced this: None
15186 Sep 22 23:21:37.770 INFO [1] Starts reconcile loop
15187 Sep 22 23:21:37.770 INFO [2] be1bbe57-a78b-41fd-91a6-742ef82c740e (dfde9f11-1418-46ce-85ab-4ed932ebdb6b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15188 Sep 22 23:21:37.770 INFO [2] Transition from WaitActive to WaitQuorum
15189 Sep 22 23:21:37.770 WARN [2] new RM replaced this: None
15190 Sep 22 23:21:37.770 INFO [2] Starts reconcile loop
15191 Sep 22 23:21:37.770 INFO [0] 127.0.0.1:61806 task reports connection:true
15192 Sep 22 23:21:37.770 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e WaitQuorum WaitQuorum WaitQuorum
15193 Sep 22 23:21:37.770 INFO [0]R flush_numbers: [0, 0]
15194 Sep 22 23:21:37.770 INFO [0]R generation: [0, 0]
15195 Sep 22 23:21:37.770 INFO [0]R dirty: [false, false]
15196 Sep 22 23:21:37.770 INFO [1]R flush_numbers: [0, 0]
15197 Sep 22 23:21:37.770 INFO [1]R generation: [0, 0]
15198 Sep 22 23:21:37.770 INFO [1]R dirty: [false, false]
15199 Sep 22 23:21:37.770 INFO [2]R flush_numbers: [0, 0]
15200 Sep 22 23:21:37.770 INFO [2]R generation: [0, 0]
15201 Sep 22 23:21:37.770 INFO [2]R dirty: [false, false]
15202 Sep 22 23:21:37.770 INFO Max found gen is 1
15203 Sep 22 23:21:37.770 INFO Generation requested: 1 >= found:1
15204 Sep 22 23:21:37.771 INFO Next flush: 1
15205 Sep 22 23:21:37.771 INFO All extents match
15206 Sep 22 23:21:37.771 INFO No downstairs repair required
15207 Sep 22 23:21:37.771 INFO No initial repair work was required
15208 Sep 22 23:21:37.771 INFO listening on 127.0.0.1:0, task: main
15209 Sep 22 23:21:37.771 INFO Set Downstairs and Upstairs active
15210 Sep 22 23:21:37.771 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e is now active with session: dfde9f11-1418-46ce-85ab-4ed932ebdb6b
15211 Sep 22 23:21:37.771 INFO listening on 127.0.0.1:0, task: main
15212 Sep 22 23:21:37.771 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e Set Active after no repair
15213 Sep 22 23:21:37.771 INFO Notify all downstairs, region set compare is done.
15214 Sep 22 23:21:37.771 WARN 7743f09f-f465-4fba-892d-457fc58d9abf request to replace downstairs 127.0.0.1:45992 with 127.0.0.1:48840
15215 Sep 22 23:21:37.771 INFO Set check for repair
15216 Sep 22 23:21:37.771 INFO current number of open files limit 65536 is already the maximum
15217 Sep 22 23:21:37.771 INFO 7743f09f-f465-4fba-892d-457fc58d9abf found old target: 127.0.0.1:45992 at 0
15218 Sep 22 23:21:37.771 INFO Opened existing region file "/tmp/downstairs-95N00kmj/region.json"
15219 Sep 22 23:21:37.771 INFO Database read version 1
15220 Sep 22 23:21:37.771 INFO [1] 127.0.0.1:36272 task reports connection:true
15221 Sep 22 23:21:37.771 INFO 7743f09f-f465-4fba-892d-457fc58d9abf replacing old: 127.0.0.1:45992 at 0
15222 Sep 22 23:21:37.771 INFO Database write version 1
15223 Sep 22 23:21:37.771 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e Active Active Active
15224 Sep 22 23:21:37.771 INFO [0] client skip 0 in process jobs because fault, : downstairs
15225 Sep 22 23:21:37.771 INFO Set check for repair
15226 Sep 22 23:21:37.771 INFO [0] changed 0 jobs to fault skipped, : downstairs
15227 Sep 22 23:21:37.771 INFO [0] 7743f09f-f465-4fba-892d-457fc58d9abf (3175f1fa-0abb-4faa-9a83-0df20993aaa9) Active Active Active ds_transition to Replacing
15228 Sep 22 23:21:37.771 INFO [0] Transition from Active to Replacing
15229 Sep 22 23:21:37.771 INFO [2] 127.0.0.1:46846 task reports connection:true
15230 Sep 22 23:21:37.771 INFO be1bbe57-a78b-41fd-91a6-742ef82c740e Active Active Active
15231 Sep 22 23:21:37.771 INFO Set check for repair
15232 Sep 22 23:21:37.771 INFO [0] received reconcile message
15233 Sep 22 23:21:37.771 WARN 7743f09f-f465-4fba-892d-457fc58d9abf request to replace downstairs 127.0.0.1:57990 with 127.0.0.1:58780
15234 Sep 22 23:21:37.771 INFO [0] All repairs completed, exit
15235 Sep 22 23:21:37.771 INFO 7743f09f-f465-4fba-892d-457fc58d9abf found old target: 127.0.0.1:57990 at 1
15236 Sep 22 23:21:37.771 INFO [0] Starts cmd_loop
15237 Sep 22 23:21:37.771 INFO UUID: d6f117a0-d707-4b2e-a491-e44ab08fd29f
15238 Sep 22 23:21:37.771 INFO [1] received reconcile message
15239 Sep 22 23:21:37.771 INFO Blocks per extent:5 Total Extents: 2
15240 Sep 22 23:21:37.771 INFO [1] All repairs completed, exit
15241 Sep 22 23:21:37.771 INFO [1] Starts cmd_loop
15242 Sep 22 23:21:37.771 INFO Crucible Version: Crucible Version: 0.0.1
15243 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15244 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15245 rustc: 1.70.0 stable x86_64-unknown-illumos
15246 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15247 Sep 22 23:21:37.771 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15248 Sep 22 23:21:37.771 INFO Using address: 127.0.0.1:61215, task: main
15249 Sep 22 23:21:37.771 INFO [2] received reconcile message
15250 Sep 22 23:21:37.771 INFO [2] All repairs completed, exit
15251 Sep 22 23:21:37.771 INFO [2] Starts cmd_loop
15252 The guest has finished waiting for activation
15253 Sep 22 23:21:37.771 INFO Repair listens on 127.0.0.1:0, task: repair
15254 Sep 22 23:21:37.772 INFO Upstairs starts
15255 Sep 22 23:21:37.772 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42316, task: repair
15256 Sep 22 23:21:37.772 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42316, task: repair
15257 Sep 22 23:21:37.772 INFO Crucible Version: BuildInfo {
15258 version: "0.0.1",
15259 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15260 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15261 git_branch: "main",
15262 rustc_semver: "1.70.0",
15263 rustc_channel: "stable",
15264 rustc_host_triple: "x86_64-unknown-illumos",
15265 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15266 cargo_triple: "x86_64-unknown-illumos",
15267 debug: true,
15268 opt_level: 0,
15269 }
15270 Sep 22 23:21:37.772 INFO Upstairs <-> Downstairs Message Version: 4
15271 Sep 22 23:21:37.772 INFO listening, local_addr: 127.0.0.1:42316, task: repair
15272 Sep 22 23:21:37.772 INFO Crucible stats registered with UUID: afdd1779-4d01-4718-83ef-1f0582b0fcbd
15273 Sep 22 23:21:37.772 INFO Crucible afdd1779-4d01-4718-83ef-1f0582b0fcbd has session id: d69380f8-391d-43d8-ba26-076f98fc936f
15274 Sep 22 23:21:37.772 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42316, task: repair
15275 Sep 22 23:21:37.772 INFO Using repair address: 127.0.0.1:42316, task: main
15276 Sep 22 23:21:37.772 INFO No SSL acceptor configured, task: main
15277 Sep 22 23:21:37.772 INFO [0] connecting to 127.0.0.1:61806, looper: 0
15278 Sep 22 23:21:37.773 INFO [1] connecting to 127.0.0.1:36272, looper: 1
15279 Sep 22 23:21:37.773 INFO current number of open files limit 65536 is already the maximum
15280 Sep 22 23:21:37.773 INFO [2] connecting to 127.0.0.1:46846, looper: 2
15281 Sep 22 23:21:37.773 INFO Created new region file "/tmp/downstairs-GSLa5ha2/region.json"
15282 Sep 22 23:21:37.773 INFO up_listen starts, task: up_listen
15283 Sep 22 23:21:37.773 INFO Wait for all three downstairs to come online
15284 Sep 22 23:21:37.773 INFO Flush timeout: 0.5
15285 Sep 22 23:21:37.773 INFO [2] afdd1779-4d01-4718-83ef-1f0582b0fcbd looper connected, looper: 2
15286 Sep 22 23:21:37.773 INFO [2] Proc runs for 127.0.0.1:46846 in state New
15287 Sep 22 23:21:37.773 INFO [0] afdd1779-4d01-4718-83ef-1f0582b0fcbd looper connected, looper: 0
15288 Sep 22 23:21:37.773 INFO [0] Proc runs for 127.0.0.1:61806 in state New
15289 Sep 22 23:21:37.773 INFO [1] afdd1779-4d01-4718-83ef-1f0582b0fcbd looper connected, looper: 1
15290 Sep 22 23:21:37.773 INFO [1] Proc runs for 127.0.0.1:36272 in state New
15291 Sep 22 23:21:37.773 INFO accepted connection from 127.0.0.1:44545, task: main
15292 Sep 22 23:21:37.773 INFO accepted connection from 127.0.0.1:60808, task: main
15293 Sep 22 23:21:37.774 INFO accepted connection from 127.0.0.1:33455, task: main
15294 Sep 22 23:21:37.774 INFO Connection request from afdd1779-4d01-4718-83ef-1f0582b0fcbd with version 4, task: proc
15295 Sep 22 23:21:37.774 INFO upstairs UpstairsConnection { upstairs_id: afdd1779-4d01-4718-83ef-1f0582b0fcbd, session_id: 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12, gen: 1 } connected, version 4, task: proc
15296 Sep 22 23:21:37.774 INFO UUID: 9d3f8a28-aea4-4f16-a02f-499f4731649b
15297 Sep 22 23:21:37.774 INFO Connection request from afdd1779-4d01-4718-83ef-1f0582b0fcbd with version 4, task: proc
15298 Sep 22 23:21:37.774 INFO Blocks per extent:5 Total Extents: 2
15299 Sep 22 23:21:37.774 INFO upstairs UpstairsConnection { upstairs_id: afdd1779-4d01-4718-83ef-1f0582b0fcbd, session_id: 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12, gen: 1 } connected, version 4, task: proc
15300 Sep 22 23:21:37.774 INFO Crucible Version: Crucible Version: 0.0.1
15301 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15302 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15303 rustc: 1.70.0 stable x86_64-unknown-illumos
15304 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15305 Sep 22 23:21:37.774 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15306 Sep 22 23:21:37.774 INFO Connection request from afdd1779-4d01-4718-83ef-1f0582b0fcbd with version 4, task: proc
15307 Sep 22 23:21:37.774 INFO Using address: 127.0.0.1:48145, task: main
15308 test test::integration_test_just_read ... Sep 22 23:21:37.774 INFO upstairs UpstairsConnection { upstairs_id: afdd1779-4d01-4718-83ef-1f0582b0fcbd, session_id: 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12, gen: 1 } connected, version 4, task: proc
15309 ok
15310 Sep 22 23:21:37.775 INFO [2] afdd1779-4d01-4718-83ef-1f0582b0fcbd (9c74e4ab-e653-4eaa-b48f-6000a2bdbb12) New New New ds_transition to WaitActive
15311 Sep 22 23:21:37.775 INFO Repair listens on 127.0.0.1:0, task: repair
15312 Sep 22 23:21:37.775 INFO [2] Transition from New to WaitActive
15313 Sep 22 23:21:37.775 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42170, task: repair
15314 Sep 22 23:21:37.775 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42170, task: repair
15315 Sep 22 23:21:37.775 INFO current number of open files limit 65536 is already the maximum
15316 Sep 22 23:21:37.775 INFO [0] afdd1779-4d01-4718-83ef-1f0582b0fcbd (9c74e4ab-e653-4eaa-b48f-6000a2bdbb12) New New WaitActive ds_transition to WaitActive
15317 Sep 22 23:21:37.775 INFO [0] Transition from New to WaitActive
15318 Sep 22 23:21:37.775 INFO listening, local_addr: 127.0.0.1:42170, task: repair
15319 Sep 22 23:21:37.775 INFO [1] afdd1779-4d01-4718-83ef-1f0582b0fcbd (9c74e4ab-e653-4eaa-b48f-6000a2bdbb12) WaitActive New WaitActive ds_transition to WaitActive
15320 Sep 22 23:21:37.775 INFO Created new region file "/tmp/downstairs-3Ng9gZLV/region.json"
15321 Sep 22 23:21:37.775 INFO [1] Transition from New to WaitActive
15322 Sep 22 23:21:37.775 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42170, task: repair
15323 Sep 22 23:21:37.775 INFO Using repair address: 127.0.0.1:42170, task: main
15324 Sep 22 23:21:37.775 INFO No SSL acceptor configured, task: main
15325 The guest has requested activation
15326 Sep 22 23:21:37.775 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd active request set
15327 Sep 22 23:21:37.775 INFO [0] received activate with gen 1
15328 Sep 22 23:21:37.775 INFO [0] client got ds_active_rx, promote! session 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12
15329 Sep 22 23:21:37.775 INFO [1] received activate with gen 1
15330 Sep 22 23:21:37.775 INFO [1] client got ds_active_rx, promote! session 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12
15331 Sep 22 23:21:37.775 INFO current number of open files limit 65536 is already the maximum
15332 Sep 22 23:21:37.775 INFO [2] received activate with gen 1
15333 Sep 22 23:21:37.775 INFO [2] client got ds_active_rx, promote! session 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12
15334 Sep 22 23:21:37.775 INFO Created new region file "/tmp/downstairs-11dCDdeO/region.json"
15335 Sep 22 23:21:37.776 INFO [2] downstairs client at 127.0.0.1:46846 has UUID d57f7bca-56d6-4fef-ba3b-7c8c7e1674fe
15336 Sep 22 23:21:37.776 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d57f7bca-56d6-4fef-ba3b-7c8c7e1674fe, encrypted: true, database_read_version: 1, database_write_version: 1 }
15337 Sep 22 23:21:37.776 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd WaitActive WaitActive WaitActive
15338 Sep 22 23:21:37.776 INFO [0] downstairs client at 127.0.0.1:61806 has UUID 17366ba3-9d52-4dce-a4fd-d4657621aec9
15339 Sep 22 23:21:37.776 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 17366ba3-9d52-4dce-a4fd-d4657621aec9, encrypted: true, database_read_version: 1, database_write_version: 1 }
15340 Sep 22 23:21:37.776 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd WaitActive WaitActive WaitActive
15341 Sep 22 23:21:37.776 INFO [1] downstairs client at 127.0.0.1:36272 has UUID e173e972-e8cf-416e-bc11-1d8d35f2e9ba
15342 Sep 22 23:21:37.776 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e173e972-e8cf-416e-bc11-1d8d35f2e9ba, encrypted: true, database_read_version: 1, database_write_version: 1 }
15343 Sep 22 23:21:37.777 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd WaitActive WaitActive WaitActive
15344 Sep 22 23:21:37.777 INFO Current flush_numbers [0..12]: [0, 0]
15345 Sep 22 23:21:37.777 INFO Downstairs has completed Negotiation, task: proc
15346 Sep 22 23:21:37.777 INFO current number of open files limit 65536 is already the maximum
15347 Sep 22 23:21:37.777 INFO Opened existing region file "/tmp/downstairs-GSLa5ha2/region.json"
15348 Sep 22 23:21:37.777 INFO Database read version 1
15349 Sep 22 23:21:37.777 INFO Database write version 1
15350 test test::integration_test_guest_replace_many_downstairs ... ok
15351 Sep 22 23:21:37.778 INFO current number of open files limit 65536 is already the maximum
15352 Sep 22 23:21:37.778 INFO Created new region file "/tmp/downstairs-1gO30onD/region.json"
15353 Sep 22 23:21:37.778 INFO Current flush_numbers [0..12]: [0, 0]
15354 test test::integration_test_guest_downstairs_unwritten ... ok
15355 Sep 22 23:21:37.778 INFO Downstairs has completed Negotiation, task: proc
15356 Sep 22 23:21:37.779 INFO current number of open files limit 65536 is already the maximum
15357 Sep 22 23:21:37.779 INFO Current flush_numbers [0..12]: [0, 0]
15358 Sep 22 23:21:37.779 INFO Created new region file "/tmp/downstairs-4bgragna/region.json"
15359 Sep 22 23:21:37.779 INFO Downstairs has completed Negotiation, task: proc
15360 Sep 22 23:21:37.779 INFO [2] afdd1779-4d01-4718-83ef-1f0582b0fcbd (9c74e4ab-e653-4eaa-b48f-6000a2bdbb12) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15361 Sep 22 23:21:37.779 INFO [2] Transition from WaitActive to WaitQuorum
15362 Sep 22 23:21:37.779 WARN [2] new RM replaced this: None
15363 Sep 22 23:21:37.779 INFO [2] Starts reconcile loop
15364 Sep 22 23:21:37.779 INFO [0] afdd1779-4d01-4718-83ef-1f0582b0fcbd (9c74e4ab-e653-4eaa-b48f-6000a2bdbb12) WaitActive WaitActive WaitQuorum ds_transition to WaitQuorum
15365 Sep 22 23:21:37.779 INFO [0] Transition from WaitActive to WaitQuorum
15366 Sep 22 23:21:37.779 WARN [0] new RM replaced this: None
15367 Sep 22 23:21:37.779 INFO [0] Starts reconcile loop
15368 Sep 22 23:21:37.779 INFO [1] afdd1779-4d01-4718-83ef-1f0582b0fcbd (9c74e4ab-e653-4eaa-b48f-6000a2bdbb12) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
15369 Sep 22 23:21:37.779 INFO [1] Transition from WaitActive to WaitQuorum
15370 Sep 22 23:21:37.779 WARN [1] new RM replaced this: None
15371 Sep 22 23:21:37.779 INFO [1] Starts reconcile loop
15372 Sep 22 23:21:37.779 INFO [2] 127.0.0.1:46846 task reports connection:true
15373 Sep 22 23:21:37.779 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd WaitQuorum WaitQuorum WaitQuorum
15374 Sep 22 23:21:37.779 INFO [0]R flush_numbers: [0, 0]
15375 Sep 22 23:21:37.779 INFO [0]R generation: [0, 0]
15376 Sep 22 23:21:37.779 INFO [0]R dirty: [false, false]
15377 Sep 22 23:21:37.779 INFO [1]R flush_numbers: [0, 0]
15378 Sep 22 23:21:37.779 INFO [1]R generation: [0, 0]
15379 Sep 22 23:21:37.779 INFO [1]R dirty: [false, false]
15380 Sep 22 23:21:37.779 INFO [2]R flush_numbers: [0, 0]
15381 Sep 22 23:21:37.779 INFO [2]R generation: [0, 0]
15382 Sep 22 23:21:37.779 INFO [2]R dirty: [false, false]
15383 Sep 22 23:21:37.780 INFO Max found gen is 1
15384 Sep 22 23:21:37.780 INFO Generation requested: 1 >= found:1
15385 Sep 22 23:21:37.780 INFO current number of open files limit 65536 is already the maximum
15386 Sep 22 23:21:37.780 INFO Next flush: 1
15387 Sep 22 23:21:37.780 INFO Opened existing region file "/tmp/downstairs-11dCDdeO/region.json"
15388 Sep 22 23:21:37.780 INFO All extents match
15389 Sep 22 23:21:37.780 INFO Database read version 1
15390 Sep 22 23:21:37.780 INFO No downstairs repair required
15391 Sep 22 23:21:37.780 INFO Database write version 1
15392 Sep 22 23:21:37.780 INFO No initial repair work was required
15393 Sep 22 23:21:37.780 INFO Set Downstairs and Upstairs active
15394 Sep 22 23:21:37.780 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd is now active with session: 9c74e4ab-e653-4eaa-b48f-6000a2bdbb12
15395 Sep 22 23:21:37.780 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd Set Active after no repair
15396 Sep 22 23:21:37.780 INFO Notify all downstairs, region set compare is done.
15397 Sep 22 23:21:37.780 INFO current number of open files limit 65536 is already the maximum
15398 Sep 22 23:21:37.780 INFO Set check for repair
15399 Sep 22 23:21:37.780 INFO UUID: 3ab28074-4b58-4686-8a29-dc39469e56d5
15400 Sep 22 23:21:37.780 INFO Opened existing region file "/tmp/downstairs-3Ng9gZLV/region.json"
15401 Sep 22 23:21:37.780 INFO Blocks per extent:5 Total Extents: 2
15402 Sep 22 23:21:37.780 INFO Database read version 1
15403 Sep 22 23:21:37.780 INFO Database write version 1
15404 Sep 22 23:21:37.780 INFO [0] 127.0.0.1:61806 task reports connection:true
15405 Sep 22 23:21:37.780 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd Active Active Active
15406 Sep 22 23:21:37.780 INFO Crucible Version: Crucible Version: 0.0.1
15407 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15408 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15409 rustc: 1.70.0 stable x86_64-unknown-illumos
15410 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15411 Sep 22 23:21:37.780 INFO Set check for repair
15412 Sep 22 23:21:37.780 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15413 Sep 22 23:21:37.780 INFO Using address: 127.0.0.1:62024, task: main
15414 Sep 22 23:21:37.780 INFO [1] 127.0.0.1:36272 task reports connection:true
15415 Sep 22 23:21:37.780 INFO afdd1779-4d01-4718-83ef-1f0582b0fcbd Active Active Active
15416 Sep 22 23:21:37.780 INFO Set check for repair
15417 Sep 22 23:21:37.780 INFO [0] received reconcile message
15418 Sep 22 23:21:37.780 INFO [0] All repairs completed, exit
15419 Sep 22 23:21:37.780 INFO [0] Starts cmd_loop
15420 Sep 22 23:21:37.780 INFO Repair listens on 127.0.0.1:0, task: repair
15421 Sep 22 23:21:37.780 INFO [1] received reconcile message
15422 Sep 22 23:21:37.780 INFO [1] All repairs completed, exit
15423 Sep 22 23:21:37.780 INFO [1] Starts cmd_loop
15424 Sep 22 23:21:37.780 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48900, task: repair
15425 Sep 22 23:21:37.780 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48900, task: repair
15426 Sep 22 23:21:37.780 INFO [2] received reconcile message
15427 Sep 22 23:21:37.780 INFO listening, local_addr: 127.0.0.1:48900, task: repair
15428 Sep 22 23:21:37.780 INFO [2] All repairs completed, exit
15429 Sep 22 23:21:37.780 INFO [2] Starts cmd_loop
15430 The guest has finished waiting for activation
15431 Sep 22 23:21:37.780 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48900, task: repair
15432 Sep 22 23:21:37.780 INFO Using repair address: 127.0.0.1:48900, task: main
15433 Sep 22 23:21:37.780 INFO No SSL acceptor configured, task: main
15434 Sep 22 23:21:37.781 DEBG IO Read 1000 has deps []
15435 Sep 22 23:21:37.781 INFO UUID: 890de4ee-4d21-427c-8570-1ce5a5506828
15436 Sep 22 23:21:37.781 INFO Blocks per extent:5 Total Extents: 2
15437 Sep 22 23:21:37.781 INFO Crucible Version: Crucible Version: 0.0.1
15438 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15439 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15440 rustc: 1.70.0 stable x86_64-unknown-illumos
15441 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15442 Sep 22 23:21:37.781 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15443 Sep 22 23:21:37.781 INFO Using address: 127.0.0.1:40738, task: main
15444 Sep 22 23:21:37.781 INFO Upstairs starts
15445 Sep 22 23:21:37.781 INFO Crucible Version: BuildInfo {
15446 version: "0.0.1",
15447 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15448 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15449 git_branch: "main",
15450 rustc_semver: "1.70.0",
15451 rustc_channel: "stable",
15452 rustc_host_triple: "x86_64-unknown-illumos",
15453 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15454 cargo_triple: "x86_64-unknown-illumos",
15455 debug: true,
15456 opt_level: 0,
15457 }
15458 Sep 22 23:21:37.781 INFO Upstairs <-> Downstairs Message Version: 4
15459 Sep 22 23:21:37.781 INFO Crucible stats registered with UUID: c3d60856-e346-48b1-a11b-e1e0e826a086
15460 Sep 22 23:21:37.781 INFO Crucible c3d60856-e346-48b1-a11b-e1e0e826a086 has session id: a8384867-0cbd-477d-8ed0-64521563faca
15461 Sep 22 23:21:37.781 INFO Repair listens on 127.0.0.1:0, task: repair
15462 Sep 22 23:21:37.781 INFO listening on 127.0.0.1:0, task: main
15463 Sep 22 23:21:37.781 INFO listening on 127.0.0.1:0, task: main
15464 Sep 22 23:21:37.781 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35328, task: repair
15465 Sep 22 23:21:37.781 INFO listening on 127.0.0.1:0, task: main
15466 Sep 22 23:21:37.781 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35328, task: repair
15467 Sep 22 23:21:37.782 INFO [0] connecting to 127.0.0.1:63732, looper: 0
15468 Sep 22 23:21:37.782 DEBG Read :1000 deps:[] res:true
15469 Sep 22 23:21:37.782 INFO listening, local_addr: 127.0.0.1:35328, task: repair
15470 Sep 22 23:21:37.782 INFO UUID: 8f0829a0-9286-4151-a390-4aadd7c97e80
15471 Sep 22 23:21:37.782 INFO Blocks per extent:5 Total Extents: 2
15472 Sep 22 23:21:37.782 INFO [1] connecting to 127.0.0.1:61215, looper: 1
15473 Sep 22 23:21:37.782 INFO Crucible Version: Crucible Version: 0.0.1
15474 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15475 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15476 rustc: 1.70.0 stable x86_64-unknown-illumos
15477 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15478 Sep 22 23:21:37.782 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15479 Sep 22 23:21:37.782 INFO [2] connecting to 127.0.0.1:62024, looper: 2
15480 Sep 22 23:21:37.782 INFO Using address: 127.0.0.1:49534, task: main
15481 Sep 22 23:21:37.782 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35328, task: repair
15482 Sep 22 23:21:37.782 DEBG Read :1000 deps:[] res:true
15483 Sep 22 23:21:37.782 INFO up_listen starts, task: up_listen
15484 Sep 22 23:21:37.782 INFO Wait for all three downstairs to come online
15485 Sep 22 23:21:37.782 INFO Using repair address: 127.0.0.1:35328, task: main
15486 Sep 22 23:21:37.782 INFO Flush timeout: 0.5
15487 Sep 22 23:21:37.782 INFO No SSL acceptor configured, task: main
15488 Sep 22 23:21:37.782 INFO current number of open files limit 65536 is already the maximum
15489 Sep 22 23:21:37.782 INFO Opened existing region file "/tmp/downstairs-1gO30onD/region.json"
15490 Sep 22 23:21:37.782 INFO Database read version 1
15491 Sep 22 23:21:37.782 DEBG Read :1000 deps:[] res:true
15492 Sep 22 23:21:37.782 INFO Database write version 1
15493 Sep 22 23:21:37.782 INFO accepted connection from 127.0.0.1:36783, task: main
15494 Sep 22 23:21:37.782 INFO [0] c3d60856-e346-48b1-a11b-e1e0e826a086 looper connected, looper: 0
15495 Sep 22 23:21:37.782 INFO [0] Proc runs for 127.0.0.1:63732 in state New
15496 Sep 22 23:21:37.782 INFO Repair listens on 127.0.0.1:0, task: repair
15497 Sep 22 23:21:37.782 INFO accepted connection from 127.0.0.1:39601, task: main
15498 Sep 22 23:21:37.782 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36538, task: repair
15499 Sep 22 23:21:37.782 INFO accepted connection from 127.0.0.1:42280, task: main
15500 Sep 22 23:21:37.782 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36538, task: repair
15501 Sep 22 23:21:37.782 INFO [1] c3d60856-e346-48b1-a11b-e1e0e826a086 looper connected, looper: 1
15502 Sep 22 23:21:37.782 INFO listening, local_addr: 127.0.0.1:36538, task: repair
15503 Sep 22 23:21:37.782 INFO [1] Proc runs for 127.0.0.1:61215 in state New
15504 Sep 22 23:21:37.782 DEBG [0] Read AckReady 1000, : downstairs
15505 Sep 22 23:21:37.782 INFO [2] c3d60856-e346-48b1-a11b-e1e0e826a086 looper connected, looper: 2
15506 Sep 22 23:21:37.782 INFO [2] Proc runs for 127.0.0.1:62024 in state New
15507 Sep 22 23:21:37.782 DEBG [1] Read already AckReady 1000, : downstairs
15508 Sep 22 23:21:37.782 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36538, task: repair
15509 Sep 22 23:21:37.782 INFO Using repair address: 127.0.0.1:36538, task: main
15510 Sep 22 23:21:37.783 INFO No SSL acceptor configured, task: main
15511 Sep 22 23:21:37.783 DEBG [2] Read already AckReady 1000, : downstairs
15512 Sep 22 23:21:37.783 DEBG up_ds_listen was notified
15513 Sep 22 23:21:37.783 DEBG up_ds_listen process 1000
15514 Sep 22 23:21:37.783 DEBG [A] ack job 1000:1, : downstairs
15515 Sep 22 23:21:37.783 INFO Upstairs starts
15516 Sep 22 23:21:37.783 INFO Connection request from c3d60856-e346-48b1-a11b-e1e0e826a086 with version 4, task: proc
15517 Sep 22 23:21:37.783 INFO upstairs UpstairsConnection { upstairs_id: c3d60856-e346-48b1-a11b-e1e0e826a086, session_id: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b, gen: 1 } connected, version 4, task: proc
15518 Sep 22 23:21:37.783 INFO Crucible Version: BuildInfo {
15519 version: "0.0.1",
15520 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15521 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15522 git_branch: "main",
15523 rustc_semver: "1.70.0",
15524 rustc_channel: "stable",
15525 rustc_host_triple: "x86_64-unknown-illumos",
15526 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15527 cargo_triple: "x86_64-unknown-illumos",
15528 debug: true,
15529 opt_level: 0,
15530 }
15531 Sep 22 23:21:37.783 INFO Upstairs <-> Downstairs Message Version: 4
15532 Sep 22 23:21:37.783 DEBG up_ds_listen checked 1 jobs, back to waiting
15533 Sep 22 23:21:37.783 INFO Crucible stats registered with UUID: 0509f38e-fad6-4194-ae26-f925e684ff33
15534 Sep 22 23:21:37.783 INFO Connection request from c3d60856-e346-48b1-a11b-e1e0e826a086 with version 4, task: proc
15535 Sep 22 23:21:37.783 INFO Crucible 0509f38e-fad6-4194-ae26-f925e684ff33 has session id: d1777d3f-1d8b-4f8a-8fb7-855b4a3eb57a
15536 Sep 22 23:21:37.783 INFO upstairs UpstairsConnection { upstairs_id: c3d60856-e346-48b1-a11b-e1e0e826a086, session_id: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b, gen: 1 } connected, version 4, task: proc
15537 Sep 22 23:21:37.783 INFO Connection request from c3d60856-e346-48b1-a11b-e1e0e826a086 with version 4, task: proc
15538 Sep 22 23:21:37.783 INFO upstairs UpstairsConnection { upstairs_id: c3d60856-e346-48b1-a11b-e1e0e826a086, session_id: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b, gen: 1 } connected, version 4, task: proc
15539 Sep 22 23:21:37.783 INFO current number of open files limit 65536 is already the maximum
15540 Sep 22 23:21:37.783 INFO listening on 127.0.0.1:0, task: main
15541 Sep 22 23:21:37.783 DEBG IO Read 1000 has deps []
15542 Sep 22 23:21:37.783 INFO Created new region file "/tmp/downstairs-SCrOyIq5/region.json"
15543 Sep 22 23:21:37.783 INFO listening on 127.0.0.1:0, task: main
15544 Sep 22 23:21:37.783 INFO listening on 127.0.0.1:0, task: main
15545 Sep 22 23:21:37.783 INFO [0] c3d60856-e346-48b1-a11b-e1e0e826a086 (c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b) New New New ds_transition to WaitActive
15546 Sep 22 23:21:37.783 INFO [0] Transition from New to WaitActive
15547 Sep 22 23:21:37.783 INFO [0] connecting to 127.0.0.1:60322, looper: 0
15548 Sep 22 23:21:37.783 INFO [1] c3d60856-e346-48b1-a11b-e1e0e826a086 (c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b) WaitActive New New ds_transition to WaitActive
15549 Sep 22 23:21:37.783 INFO [1] Transition from New to WaitActive
15550 Sep 22 23:21:37.783 INFO [2] c3d60856-e346-48b1-a11b-e1e0e826a086 (c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b) WaitActive WaitActive New ds_transition to WaitActive
15551 Sep 22 23:21:37.783 INFO [2] Transition from New to WaitActive
15552 Sep 22 23:21:37.783 INFO [1] connecting to 127.0.0.1:48145, looper: 1
15553 The guest has requested activation
15554 Sep 22 23:21:37.783 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 active request set
15555 Sep 22 23:21:37.783 INFO [2] connecting to 127.0.0.1:40738, looper: 2
15556 Sep 22 23:21:37.783 INFO current number of open files limit 65536 is already the maximum
15557 Sep 22 23:21:37.783 INFO Opened existing region file "/tmp/downstairs-4bgragna/region.json"
15558 Sep 22 23:21:37.783 INFO Database read version 1
15559 Sep 22 23:21:37.783 INFO [0] received activate with gen 1
15560 Sep 22 23:21:37.783 INFO Database write version 1
15561 Sep 22 23:21:37.783 INFO [0] client got ds_active_rx, promote! session c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b
15562 Sep 22 23:21:37.783 INFO up_listen starts, task: up_listen
15563 Sep 22 23:21:37.783 INFO [1] received activate with gen 1
15564 Sep 22 23:21:37.784 INFO Wait for all three downstairs to come online
15565 Sep 22 23:21:37.784 INFO [1] client got ds_active_rx, promote! session c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b
15566 Sep 22 23:21:37.784 INFO Flush timeout: 0.5
15567 Sep 22 23:21:37.784 INFO [2] received activate with gen 1
15568 Sep 22 23:21:37.784 INFO [2] client got ds_active_rx, promote! session c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b
15569 Sep 22 23:21:37.784 DEBG Read :1000 deps:[] res:true
15570 Sep 22 23:21:37.784 INFO UpstairsConnection { upstairs_id: c3d60856-e346-48b1-a11b-e1e0e826a086, session_id: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b, gen: 1 } is now active (read-write)
15571 Sep 22 23:21:37.784 INFO accepted connection from 127.0.0.1:45137, task: main
15572 Sep 22 23:21:37.784 INFO UpstairsConnection { upstairs_id: c3d60856-e346-48b1-a11b-e1e0e826a086, session_id: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b, gen: 1 } is now active (read-write)
15573 Sep 22 23:21:37.784 INFO accepted connection from 127.0.0.1:36406, task: main
15574 Sep 22 23:21:37.784 DEBG Read :1000 deps:[] res:true
15575 Sep 22 23:21:37.784 INFO UpstairsConnection { upstairs_id: c3d60856-e346-48b1-a11b-e1e0e826a086, session_id: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b, gen: 1 } is now active (read-write)
15576 Sep 22 23:21:37.784 INFO accepted connection from 127.0.0.1:46801, task: main
15577 Sep 22 23:21:37.784 INFO [0] 0509f38e-fad6-4194-ae26-f925e684ff33 looper connected, looper: 0
15578 Sep 22 23:21:37.784 INFO [0] Proc runs for 127.0.0.1:60322 in state New
15579 Sep 22 23:21:37.784 DEBG Read :1000 deps:[] res:true
15580 Sep 22 23:21:37.784 INFO [1] 0509f38e-fad6-4194-ae26-f925e684ff33 looper connected, looper: 1
15581 Sep 22 23:21:37.784 INFO [1] Proc runs for 127.0.0.1:48145 in state New
15582 Sep 22 23:21:37.784 INFO [2] 0509f38e-fad6-4194-ae26-f925e684ff33 looper connected, looper: 2
15583 Sep 22 23:21:37.784 INFO [0] downstairs client at 127.0.0.1:63732 has UUID 948c8c95-e366-4ab4-b43c-f85926d90cc9
15584 Sep 22 23:21:37.784 INFO [2] Proc runs for 127.0.0.1:40738 in state New
15585 Sep 22 23:21:37.784 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 948c8c95-e366-4ab4-b43c-f85926d90cc9, encrypted: true, database_read_version: 1, database_write_version: 1 }
15586 Sep 22 23:21:37.784 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 WaitActive WaitActive WaitActive
15587 Sep 22 23:21:37.784 INFO [1] downstairs client at 127.0.0.1:61215 has UUID d6f117a0-d707-4b2e-a491-e44ab08fd29f
15588 Sep 22 23:21:37.784 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d6f117a0-d707-4b2e-a491-e44ab08fd29f, encrypted: true, database_read_version: 1, database_write_version: 1 }
15589 Sep 22 23:21:37.784 INFO UUID: 8ac2d079-494b-4dc0-bfb3-1771389b3218
15590 Sep 22 23:21:37.784 INFO Blocks per extent:5 Total Extents: 2
15591 Sep 22 23:21:37.784 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 WaitActive WaitActive WaitActive
15592 Sep 22 23:21:37.784 DEBG [2] Read AckReady 1000, : downstairs
15593 Sep 22 23:21:37.784 INFO [2] downstairs client at 127.0.0.1:62024 has UUID 3ab28074-4b58-4686-8a29-dc39469e56d5
15594 Sep 22 23:21:37.784 INFO Crucible Version: Crucible Version: 0.0.1
15595 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15596 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15597 rustc: 1.70.0 stable x86_64-unknown-illumos
15598 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15599 Sep 22 23:21:37.784 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3ab28074-4b58-4686-8a29-dc39469e56d5, encrypted: true, database_read_version: 1, database_write_version: 1 }
15600 Sep 22 23:21:37.784 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15601 Sep 22 23:21:37.784 DEBG [0] Read already AckReady 1000, : downstairs
15602 Sep 22 23:21:37.784 INFO Using address: 127.0.0.1:64153, task: main
15603 Sep 22 23:21:37.784 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 WaitActive WaitActive WaitActive
15604 Sep 22 23:21:37.785 DEBG [1] Read already AckReady 1000, : downstairs
15605 Sep 22 23:21:37.785 DEBG up_ds_listen was notified
15606 Sep 22 23:21:37.785 DEBG up_ds_listen process 1000
15607 Sep 22 23:21:37.785 INFO Current flush_numbers [0..12]: [0, 0]
15608 Sep 22 23:21:37.785 DEBG [A] ack job 1000:1, : downstairs
15609 Sep 22 23:21:37.785 INFO Connection request from 0509f38e-fad6-4194-ae26-f925e684ff33 with version 4, task: proc
15610 Sep 22 23:21:37.785 INFO upstairs UpstairsConnection { upstairs_id: 0509f38e-fad6-4194-ae26-f925e684ff33, session_id: 2e9b507b-029a-450c-8cce-275488d4eebe, gen: 1 } connected, version 4, task: proc
15611 Sep 22 23:21:37.785 INFO Downstairs has completed Negotiation, task: proc
15612 Sep 22 23:21:37.785 DEBG up_ds_listen checked 1 jobs, back to waiting
15613 Sep 22 23:21:37.785 INFO Connection request from 0509f38e-fad6-4194-ae26-f925e684ff33 with version 4, task: proc
15614 Sep 22 23:21:37.785 INFO Repair listens on 127.0.0.1:0, task: repair
15615 Sep 22 23:21:37.785 INFO upstairs UpstairsConnection { upstairs_id: 0509f38e-fad6-4194-ae26-f925e684ff33, session_id: 2e9b507b-029a-450c-8cce-275488d4eebe, gen: 1 } connected, version 4, task: proc
15616 Sep 22 23:21:37.785 INFO Connection request from 0509f38e-fad6-4194-ae26-f925e684ff33 with version 4, task: proc
15617 Sep 22 23:21:37.785 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53793, task: repair
15618 Sep 22 23:21:37.785 INFO upstairs UpstairsConnection { upstairs_id: 0509f38e-fad6-4194-ae26-f925e684ff33, session_id: 2e9b507b-029a-450c-8cce-275488d4eebe, gen: 1 } connected, version 4, task: proc
15619 Sep 22 23:21:37.785 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53793, task: repair
15620 Sep 22 23:21:37.785 INFO listening, local_addr: 127.0.0.1:53793, task: repair
15621 The guest has requested activation
15622 Sep 22 23:21:37.785 INFO Current flush_numbers [0..12]: [0, 0]
15623 Sep 22 23:21:37.785 INFO [0] 0509f38e-fad6-4194-ae26-f925e684ff33 (2e9b507b-029a-450c-8cce-275488d4eebe) New New New ds_transition to WaitActive
15624 Sep 22 23:21:37.785 INFO [0] Transition from New to WaitActive
15625 Sep 22 23:21:37.785 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53793, task: repair
15626 Sep 22 23:21:37.785 INFO Downstairs has completed Negotiation, task: proc
15627 Sep 22 23:21:37.785 INFO Using repair address: 127.0.0.1:53793, task: main
15628 Sep 22 23:21:37.785 INFO No SSL acceptor configured, task: main
15629 Sep 22 23:21:37.785 INFO [1] 0509f38e-fad6-4194-ae26-f925e684ff33 (2e9b507b-029a-450c-8cce-275488d4eebe) WaitActive New New ds_transition to WaitActive
15630 Sep 22 23:21:37.785 INFO [1] Transition from New to WaitActive
15631 Sep 22 23:21:37.785 INFO [2] 0509f38e-fad6-4194-ae26-f925e684ff33 (2e9b507b-029a-450c-8cce-275488d4eebe) WaitActive WaitActive New ds_transition to WaitActive
15632 Sep 22 23:21:37.785 INFO Current flush_numbers [0..12]: [0, 0]
15633 Sep 22 23:21:37.785 INFO [2] Transition from New to WaitActive
15634 Sep 22 23:21:37.785 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 active request set
15635 Sep 22 23:21:37.786 INFO Downstairs has completed Negotiation, task: proc
15636 Sep 22 23:21:37.786 INFO [0] received activate with gen 1
15637 Sep 22 23:21:37.786 INFO [0] client got ds_active_rx, promote! session 2e9b507b-029a-450c-8cce-275488d4eebe
15638 Sep 22 23:21:37.786 INFO [0] c3d60856-e346-48b1-a11b-e1e0e826a086 (c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15639 Sep 22 23:21:37.786 INFO [1] received activate with gen 1
15640 Sep 22 23:21:37.786 INFO [0] Transition from WaitActive to WaitQuorum
15641 Sep 22 23:21:37.786 WARN [0] new RM replaced this: None
15642 Sep 22 23:21:37.786 INFO [1] client got ds_active_rx, promote! session 2e9b507b-029a-450c-8cce-275488d4eebe
15643 Sep 22 23:21:37.786 INFO current number of open files limit 65536 is already the maximum
15644 Sep 22 23:21:37.786 INFO [0] Starts reconcile loop
15645 Sep 22 23:21:37.786 INFO [2] received activate with gen 1
15646 Sep 22 23:21:37.786 INFO [1] c3d60856-e346-48b1-a11b-e1e0e826a086 (c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15647 Sep 22 23:21:37.786 INFO [1] Transition from WaitActive to WaitQuorum
15648 Sep 22 23:21:37.786 INFO [2] client got ds_active_rx, promote! session 2e9b507b-029a-450c-8cce-275488d4eebe
15649 Sep 22 23:21:37.786 WARN [1] new RM replaced this: None
15650 Sep 22 23:21:37.786 INFO [1] Starts reconcile loop
15651 Sep 22 23:21:37.786 INFO Created new region file "/tmp/downstairs-KCgrRonK/region.json"
15652 Sep 22 23:21:37.786 INFO [2] c3d60856-e346-48b1-a11b-e1e0e826a086 (c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15653 Sep 22 23:21:37.786 INFO [2] Transition from WaitActive to WaitQuorum
15654 Sep 22 23:21:37.786 WARN [2] new RM replaced this: None
15655 Sep 22 23:21:37.786 INFO [2] Starts reconcile loop
15656 Sep 22 23:21:37.786 INFO UpstairsConnection { upstairs_id: 0509f38e-fad6-4194-ae26-f925e684ff33, session_id: 2e9b507b-029a-450c-8cce-275488d4eebe, gen: 1 } is now active (read-write)
15657 Sep 22 23:21:37.786 INFO [0] 127.0.0.1:63732 task reports connection:true
15658 Sep 22 23:21:37.786 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 WaitQuorum WaitQuorum WaitQuorum
15659 Sep 22 23:21:37.786 INFO [0]R flush_numbers: [0, 0]
15660 Sep 22 23:21:37.786 INFO UUID: ed2b689d-1a8b-49a7-ad7b-f6874511d0ef
15661 Sep 22 23:21:37.786 INFO [0]R generation: [0, 0]
15662 Sep 22 23:21:37.786 INFO [0]R dirty: [false, false]
15663 Sep 22 23:21:37.786 INFO Blocks per extent:5 Total Extents: 2
15664 Sep 22 23:21:37.786 INFO [1]R flush_numbers: [0, 0]
15665 Sep 22 23:21:37.786 INFO [1]R generation: [0, 0]
15666 Sep 22 23:21:37.786 INFO [1]R dirty: [false, false]
15667 Sep 22 23:21:37.786 INFO [2]R flush_numbers: [0, 0]
15668 Sep 22 23:21:37.786 INFO [2]R generation: [0, 0]
15669 Sep 22 23:21:37.786 INFO UpstairsConnection { upstairs_id: 0509f38e-fad6-4194-ae26-f925e684ff33, session_id: 2e9b507b-029a-450c-8cce-275488d4eebe, gen: 1 } is now active (read-write)
15670 Sep 22 23:21:37.786 INFO [2]R dirty: [false, false]
15671 Sep 22 23:21:37.786 INFO Max found gen is 1
15672 Sep 22 23:21:37.786 INFO Generation requested: 1 >= found:1
15673 Sep 22 23:21:37.786 INFO Next flush: 1
15674 Sep 22 23:21:37.786 INFO Crucible Version: Crucible Version: 0.0.1
15675 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15676 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15677 rustc: 1.70.0 stable x86_64-unknown-illumos
15678 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15679 Sep 22 23:21:37.786 INFO All extents match
15680 Sep 22 23:21:37.786 INFO No downstairs repair required
15681 Sep 22 23:21:37.786 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15682 Sep 22 23:21:37.786 INFO No initial repair work was required
15683 Sep 22 23:21:37.786 INFO Using address: 127.0.0.1:33453, task: main
15684 Sep 22 23:21:37.786 INFO Set Downstairs and Upstairs active
15685 Sep 22 23:21:37.786 INFO UpstairsConnection { upstairs_id: 0509f38e-fad6-4194-ae26-f925e684ff33, session_id: 2e9b507b-029a-450c-8cce-275488d4eebe, gen: 1 } is now active (read-write)
15686 Sep 22 23:21:37.786 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 is now active with session: c1e8da4a-6bc0-4db5-aac9-c4eb133e3c5b
15687 Sep 22 23:21:37.786 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 Set Active after no repair
15688 Sep 22 23:21:37.786 INFO Notify all downstairs, region set compare is done.
15689 Sep 22 23:21:37.786 INFO Set check for repair
15690 Sep 22 23:21:37.786 INFO [1] 127.0.0.1:61215 task reports connection:true
15691 Sep 22 23:21:37.786 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 Active Active Active
15692 Sep 22 23:21:37.786 INFO Set check for repair
15693 Sep 22 23:21:37.786 INFO [2] 127.0.0.1:62024 task reports connection:true
15694 Sep 22 23:21:37.786 INFO c3d60856-e346-48b1-a11b-e1e0e826a086 Active Active Active
15695 Sep 22 23:21:37.786 INFO Set check for repair
15696 Sep 22 23:21:37.786 INFO [0] received reconcile message
15697 Sep 22 23:21:37.787 INFO [0] All repairs completed, exit
15698 Sep 22 23:21:37.787 INFO [0] Starts cmd_loop
15699 Sep 22 23:21:37.787 INFO [1] received reconcile message
15700 Sep 22 23:21:37.787 INFO [1] All repairs completed, exit
15701 Sep 22 23:21:37.787 INFO [1] Starts cmd_loop
15702 Sep 22 23:21:37.787 INFO [2] received reconcile message
15703 Sep 22 23:21:37.787 INFO [2] All repairs completed, exit
15704 Sep 22 23:21:37.787 INFO [2] Starts cmd_loop
15705 The guest has finished waiting for activation
15706 Sep 22 23:21:37.787 INFO Repair listens on 127.0.0.1:0, task: repair
15707 Sep 22 23:21:37.787 INFO [0] downstairs client at 127.0.0.1:60322 has UUID a6815f0f-5f43-4904-a056-e79c5267f195
15708 Sep 22 23:21:37.787 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a6815f0f-5f43-4904-a056-e79c5267f195, encrypted: true, database_read_version: 1, database_write_version: 1 }
15709 Sep 22 23:21:37.787 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 WaitActive WaitActive WaitActive
15710 Sep 22 23:21:37.787 INFO [1] downstairs client at 127.0.0.1:48145 has UUID 9d3f8a28-aea4-4f16-a02f-499f4731649b
15711 Sep 22 23:21:37.787 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52333, task: repair
15712 Sep 22 23:21:37.787 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9d3f8a28-aea4-4f16-a02f-499f4731649b, encrypted: true, database_read_version: 1, database_write_version: 1 }
15713 Sep 22 23:21:37.787 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52333, task: repair
15714 Sep 22 23:21:37.787 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 WaitActive WaitActive WaitActive
15715 Sep 22 23:21:37.787 INFO listening, local_addr: 127.0.0.1:52333, task: repair
15716 Sep 22 23:21:37.787 DEBG IO Read 1000 has deps []
15717 Sep 22 23:21:37.787 INFO [2] downstairs client at 127.0.0.1:40738 has UUID 890de4ee-4d21-427c-8570-1ce5a5506828
15718 Sep 22 23:21:37.787 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 890de4ee-4d21-427c-8570-1ce5a5506828, encrypted: true, database_read_version: 1, database_write_version: 1 }
15719 Sep 22 23:21:37.787 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 WaitActive WaitActive WaitActive
15720 Sep 22 23:21:37.787 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52333, task: repair
15721 Sep 22 23:21:37.787 INFO Using repair address: 127.0.0.1:52333, task: main
15722 Sep 22 23:21:37.787 INFO No SSL acceptor configured, task: main
15723 Sep 22 23:21:37.787 INFO Current flush_numbers [0..12]: [0, 0]
15724 Sep 22 23:21:37.788 INFO current number of open files limit 65536 is already the maximum
15725 Sep 22 23:21:37.788 INFO Opened existing region file "/tmp/downstairs-SCrOyIq5/region.json"
15726 Sep 22 23:21:37.788 INFO Database read version 1
15727 Sep 22 23:21:37.788 INFO current number of open files limit 65536 is already the maximum
15728 Sep 22 23:21:37.788 INFO Database write version 1
15729 Sep 22 23:21:37.788 INFO Created new region file "/tmp/downstairs-ZkhS3bbX/region.json"
15730 Sep 22 23:21:37.788 DEBG Read :1000 deps:[] res:true
15731 Sep 22 23:21:37.788 INFO Downstairs has completed Negotiation, task: proc
15732 Sep 22 23:21:37.789 INFO Current flush_numbers [0..12]: [0, 0]
15733 Sep 22 23:21:37.789 DEBG Read :1000 deps:[] res:true
15734 test test::integration_test_multi_read_only ... ok
15735 Sep 22 23:21:37.789 INFO current number of open files limit 65536 is already the maximum
15736 Sep 22 23:21:37.789 INFO Downstairs has completed Negotiation, task: proc
15737 Sep 22 23:21:37.789 INFO Created new region file "/tmp/downstairs-VBH4uE22/region.json"
15738 Sep 22 23:21:37.789 INFO Current flush_numbers [0..12]: [0, 0]
15739 Sep 22 23:21:37.789 DEBG Read :1000 deps:[] res:true
15740 Sep 22 23:21:37.790 INFO Downstairs has completed Negotiation, task: proc
15741 Sep 22 23:21:37.790 INFO [0] 0509f38e-fad6-4194-ae26-f925e684ff33 (2e9b507b-029a-450c-8cce-275488d4eebe) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15742 Sep 22 23:21:37.790 INFO [0] Transition from WaitActive to WaitQuorum
15743 Sep 22 23:21:37.790 WARN [0] new RM replaced this: None
15744 Sep 22 23:21:37.790 INFO UUID: bdfd9105-b870-40c3-a737-5c05b9c9e488
15745 Sep 22 23:21:37.790 INFO [0] Starts reconcile loop
15746 Sep 22 23:21:37.790 INFO Blocks per extent:5 Total Extents: 2
15747 Sep 22 23:21:37.790 INFO Crucible Version: Crucible Version: 0.0.1
15748 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15749 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15750 rustc: 1.70.0 stable x86_64-unknown-illumos
15751 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15752 Sep 22 23:21:37.790 INFO [1] 0509f38e-fad6-4194-ae26-f925e684ff33 (2e9b507b-029a-450c-8cce-275488d4eebe) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15753 Sep 22 23:21:37.790 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15754 Sep 22 23:21:37.790 INFO [1] Transition from WaitActive to WaitQuorum
15755 Sep 22 23:21:37.790 INFO Using address: 127.0.0.1:50238, task: main
15756 Sep 22 23:21:37.790 WARN [1] new RM replaced this: None
15757 Sep 22 23:21:37.790 INFO [1] Starts reconcile loop
15758 Sep 22 23:21:37.790 INFO [2] 0509f38e-fad6-4194-ae26-f925e684ff33 (2e9b507b-029a-450c-8cce-275488d4eebe) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15759 Sep 22 23:21:37.790 INFO [2] Transition from WaitActive to WaitQuorum
15760 Sep 22 23:21:37.790 WARN [2] new RM replaced this: None
15761 Sep 22 23:21:37.790 INFO [2] Starts reconcile loop
15762 Sep 22 23:21:37.790 INFO [0] 127.0.0.1:60322 task reports connection:true
15763 Sep 22 23:21:37.790 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 WaitQuorum WaitQuorum WaitQuorum
15764 Sep 22 23:21:37.790 INFO Repair listens on 127.0.0.1:0, task: repair
15765 Sep 22 23:21:37.790 INFO [0]R flush_numbers: [0, 0]
15766 Sep 22 23:21:37.790 INFO [0]R generation: [0, 0]
15767 Sep 22 23:21:37.790 INFO [0]R dirty: [false, false]
15768 Sep 22 23:21:37.790 INFO [1]R flush_numbers: [0, 0]
15769 Sep 22 23:21:37.790 INFO [1]R generation: [0, 0]
15770 Sep 22 23:21:37.790 INFO [1]R dirty: [false, false]
15771 Sep 22 23:21:37.790 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38527, task: repair
15772 Sep 22 23:21:37.790 INFO [2]R flush_numbers: [0, 0]
15773 Sep 22 23:21:37.790 INFO [2]R generation: [0, 0]
15774 Sep 22 23:21:37.790 INFO [2]R dirty: [false, false]
15775 Sep 22 23:21:37.790 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38527, task: repair
15776 Sep 22 23:21:37.790 INFO Max found gen is 1
15777 Sep 22 23:21:37.790 INFO Generation requested: 1 >= found:1
15778 Sep 22 23:21:37.790 INFO listening, local_addr: 127.0.0.1:38527, task: repair
15779 Sep 22 23:21:37.791 INFO Next flush: 1
15780 Sep 22 23:21:37.791 INFO All extents match
15781 Sep 22 23:21:37.791 INFO No downstairs repair required
15782 Sep 22 23:21:37.791 INFO No initial repair work was required
15783 Sep 22 23:21:37.791 INFO Set Downstairs and Upstairs active
15784 Sep 22 23:21:37.791 DEBG [0] Read AckReady 1000, : downstairs
15785 Sep 22 23:21:37.791 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 is now active with session: 2e9b507b-029a-450c-8cce-275488d4eebe
15786 Sep 22 23:21:37.791 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 Set Active after no repair
15787 Sep 22 23:21:37.791 INFO Notify all downstairs, region set compare is done.
15788 Sep 22 23:21:37.791 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38527, task: repair
15789 Sep 22 23:21:37.791 INFO Set check for repair
15790 Sep 22 23:21:37.791 INFO Using repair address: 127.0.0.1:38527, task: main
15791 Sep 22 23:21:37.791 INFO [1] 127.0.0.1:48145 task reports connection:true
15792 Sep 22 23:21:37.791 INFO No SSL acceptor configured, task: main
15793 Sep 22 23:21:37.791 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 Active Active Active
15794 Sep 22 23:21:37.791 INFO Set check for repair
15795 Sep 22 23:21:37.791 DEBG [1] Read already AckReady 1000, : downstairs
15796 Sep 22 23:21:37.791 INFO [2] 127.0.0.1:40738 task reports connection:true
15797 Sep 22 23:21:37.791 INFO 0509f38e-fad6-4194-ae26-f925e684ff33 Active Active Active
15798 Sep 22 23:21:37.791 INFO Set check for repair
15799 Sep 22 23:21:37.791 INFO [0] received reconcile message
15800 Sep 22 23:21:37.791 INFO [0] All repairs completed, exit
15801 Sep 22 23:21:37.791 DEBG [2] Read already AckReady 1000, : downstairs
15802 Sep 22 23:21:37.791 INFO [0] Starts cmd_loop
15803 Sep 22 23:21:37.791 DEBG up_ds_listen was notified
15804 Sep 22 23:21:37.791 INFO [1] received reconcile message
15805 Sep 22 23:21:37.791 DEBG up_ds_listen process 1000
15806 Sep 22 23:21:37.791 INFO [1] All repairs completed, exit
15807 Sep 22 23:21:37.791 INFO [1] Starts cmd_loop
15808 Sep 22 23:21:37.791 DEBG [A] ack job 1000:1, : downstairs
15809 Sep 22 23:21:37.791 INFO current number of open files limit 65536 is already the maximum
15810 Sep 22 23:21:37.791 INFO Opened existing region file "/tmp/downstairs-KCgrRonK/region.json"
15811 Sep 22 23:21:37.791 INFO [2] received reconcile message
15812 Sep 22 23:21:37.791 INFO Database read version 1
15813 Sep 22 23:21:37.791 INFO Database write version 1
15814 Sep 22 23:21:37.791 INFO [2] All repairs completed, exit
15815 Sep 22 23:21:37.791 INFO [2] Starts cmd_loop
15816 The guest has finished waiting for activation
15817 Sep 22 23:21:37.791 INFO current number of open files limit 65536 is already the maximum
15818 Sep 22 23:21:37.791 INFO Created new region file "/tmp/downstairs-Ezm5ouTm/region.json"
15819 Sep 22 23:21:37.792 DEBG up_ds_listen checked 1 jobs, back to waiting
15820 Sep 22 23:21:37.792 DEBG IO Read 1000 has deps []
15821 Sep 22 23:21:37.793 INFO current number of open files limit 65536 is already the maximum
15822 Sep 22 23:21:37.793 INFO Opened existing region file "/tmp/downstairs-ZkhS3bbX/region.json"
15823 Sep 22 23:21:37.793 INFO Database read version 1
15824 Sep 22 23:21:37.793 INFO Database write version 1
15825 Sep 22 23:21:37.793 DEBG Read :1000 deps:[] res:true
15826 Sep 22 23:21:37.793 INFO UUID: 816d86cc-233d-41c9-991c-d98e4fdc6748
15827 Sep 22 23:21:37.793 INFO Blocks per extent:5 Total Extents: 2
15828 Sep 22 23:21:37.793 INFO Crucible Version: Crucible Version: 0.0.1
15829 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15830 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15831 rustc: 1.70.0 stable x86_64-unknown-illumos
15832 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15833 Sep 22 23:21:37.793 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15834 Sep 22 23:21:37.793 INFO Using address: 127.0.0.1:59973, task: main
15835 Sep 22 23:21:37.793 DEBG Read :1000 deps:[] res:true
15836 Sep 22 23:21:37.794 INFO Repair listens on 127.0.0.1:0, task: repair
15837 Sep 22 23:21:37.794 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47256, task: repair
15838 Sep 22 23:21:37.794 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47256, task: repair
15839 Sep 22 23:21:37.794 INFO listening, local_addr: 127.0.0.1:47256, task: repair
15840 Sep 22 23:21:37.794 DEBG Read :1000 deps:[] res:true
15841 Sep 22 23:21:37.794 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47256, task: repair
15842 Sep 22 23:21:37.794 INFO Using repair address: 127.0.0.1:47256, task: main
15843 Sep 22 23:21:37.794 INFO No SSL acceptor configured, task: main
15844 Sep 22 23:21:37.794 INFO current number of open files limit 65536 is already the maximum
15845 Sep 22 23:21:37.794 INFO current number of open files limit 65536 is already the maximum
15846 Sep 22 23:21:37.795 INFO Opened existing region file "/tmp/downstairs-VBH4uE22/region.json"
15847 Sep 22 23:21:37.795 DEBG IO Write 1001 has deps [JobId(1000)]
15848 Sep 22 23:21:37.795 INFO Database read version 1
15849 Sep 22 23:21:37.795 INFO Database write version 1
15850 Sep 22 23:21:37.795 INFO Created new region file "/tmp/downstairs-KV5pdWk6/region.json"
15851 Sep 22 23:21:37.795 DEBG up_ds_listen was notified
15852 Sep 22 23:21:37.795 DEBG up_ds_listen process 1001
15853 Sep 22 23:21:37.795 DEBG [A] ack job 1001:2, : downstairs
15854 Sep 22 23:21:37.795 DEBG up_ds_listen checked 1 jobs, back to waiting
15855 Sep 22 23:21:37.795 DEBG [0] Read AckReady 1000, : downstairs
15856 Sep 22 23:21:37.795 DEBG [1] Read already AckReady 1000, : downstairs
15857 Sep 22 23:21:37.795 INFO UUID: e251ad5f-6614-4ef0-b821-00d45953fcfc
15858 Sep 22 23:21:37.795 INFO Blocks per extent:5 Total Extents: 2
15859 Sep 22 23:21:37.796 DEBG [2] Read already AckReady 1000, : downstairs
15860 Sep 22 23:21:37.796 INFO Crucible Version: Crucible Version: 0.0.1
15861 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15862 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15863 rustc: 1.70.0 stable x86_64-unknown-illumos
15864 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15865 Sep 22 23:21:37.796 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15866 Sep 22 23:21:37.796 INFO Using address: 127.0.0.1:36989, task: main
15867 Sep 22 23:21:37.796 DEBG up_ds_listen was notified
15868 Sep 22 23:21:37.796 DEBG up_ds_listen process 1000
15869 Sep 22 23:21:37.796 DEBG [A] ack job 1000:1, : downstairs
15870 Sep 22 23:21:37.796 INFO Repair listens on 127.0.0.1:0, task: repair
15871 Sep 22 23:21:37.796 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37445, task: repair
15872 Sep 22 23:21:37.796 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37445, task: repair
15873 Sep 22 23:21:37.796 INFO listening, local_addr: 127.0.0.1:37445, task: repair
15874 Sep 22 23:21:37.796 DEBG up_ds_listen checked 1 jobs, back to waiting
15875 Sep 22 23:21:37.796 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37445, task: repair
15876 Sep 22 23:21:37.796 INFO Using repair address: 127.0.0.1:37445, task: main
15877 Sep 22 23:21:37.796 INFO No SSL acceptor configured, task: main
15878 Sep 22 23:21:37.797 INFO UUID: 225348bb-ea71-4678-9320-8b37dbfcd881
15879 Sep 22 23:21:37.797 INFO Blocks per extent:5 Total Extents: 2
15880 Sep 22 23:21:37.797 INFO Crucible Version: Crucible Version: 0.0.1
15881 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15882 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15883 rustc: 1.70.0 stable x86_64-unknown-illumos
15884 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15885 Sep 22 23:21:37.797 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15886 Sep 22 23:21:37.797 INFO Using address: 127.0.0.1:38477, task: main
15887 Sep 22 23:21:37.797 INFO current number of open files limit 65536 is already the maximum
15888 Sep 22 23:21:37.797 INFO Created new region file "/tmp/downstairs-PbY62a19/region.json"
15889 Sep 22 23:21:37.797 INFO Repair listens on 127.0.0.1:0, task: repair
15890 Sep 22 23:21:37.797 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50593, task: repair
15891 Sep 22 23:21:37.797 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50593, task: repair
15892 Sep 22 23:21:37.797 INFO listening, local_addr: 127.0.0.1:50593, task: repair
15893 Sep 22 23:21:37.797 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50593, task: repair
15894 Sep 22 23:21:37.797 INFO Using repair address: 127.0.0.1:50593, task: main
15895 Sep 22 23:21:37.797 INFO No SSL acceptor configured, task: main
15896 Sep 22 23:21:37.797 INFO Scrub check for 996ff2d1-184c-4c18-9e4b-609bb90feb01
15897 Sep 22 23:21:37.798 INFO Scrub for 996ff2d1-184c-4c18-9e4b-609bb90feb01 begins
15898 Sep 22 23:21:37.798 INFO Scrub with total_size:5120 block_size:512
15899 Sep 22 23:21:37.798 INFO Scrubs from block 0 to 10 in (256) 131072 size IOs pm:0
15900 Sep 22 23:21:37.798 INFO Adjust block_count to 10 at offset 0
15901 Sep 22 23:21:37.798 INFO current number of open files limit 65536 is already the maximum
15902 Sep 22 23:21:37.798 INFO Opened existing region file "/tmp/downstairs-Ezm5ouTm/region.json"
15903 Sep 22 23:21:37.798 INFO Database read version 1
15904 Sep 22 23:21:37.798 INFO Database write version 1
15905 Sep 22 23:21:37.798 INFO current number of open files limit 65536 is already the maximum
15906 Sep 22 23:21:37.798 INFO Created new region file "/tmp/downstairs-xy4dQl33/region.json"
15907 Sep 22 23:21:37.798 DEBG Write :1001 deps:[JobId(1000)] res:true
15908 Sep 22 23:21:37.799 DEBG Write :1001 deps:[JobId(1000)] res:true
15909 Sep 22 23:21:37.799 INFO current number of open files limit 65536 is already the maximum
15910 Sep 22 23:21:37.799 INFO Opened existing region file "/tmp/downstairs-KV5pdWk6/region.json"
15911 Sep 22 23:21:37.799 INFO Database read version 1
15912 Sep 22 23:21:37.799 INFO Database write version 1
15913 Sep 22 23:21:37.800 DEBG Write :1001 deps:[JobId(1000)] res:true
15914 Sep 22 23:21:37.801 INFO UUID: 7f0f7567-102c-47f7-afe6-1eed37005958
15915 Sep 22 23:21:37.801 INFO Blocks per extent:5 Total Extents: 2
15916 Sep 22 23:21:37.801 DEBG IO Read 1002 has deps [JobId(1001)]
15917 Sep 22 23:21:37.801 INFO Crucible Version: Crucible Version: 0.0.1
15918 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15919 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15920 rustc: 1.70.0 stable x86_64-unknown-illumos
15921 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15922 Sep 22 23:21:37.801 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15923 Sep 22 23:21:37.801 INFO Using address: 127.0.0.1:64264, task: main
15924 Sep 22 23:21:37.801 DEBG IO Write 1001 has deps [JobId(1000)]
15925 Sep 22 23:21:37.801 INFO Repair listens on 127.0.0.1:0, task: repair
15926 Sep 22 23:21:37.801 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33498, task: repair
15927 Sep 22 23:21:37.801 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33498, task: repair
15928 Sep 22 23:21:37.801 INFO listening, local_addr: 127.0.0.1:33498, task: repair
15929 Sep 22 23:21:37.801 DEBG Read :1002 deps:[JobId(1001)] res:true
15930 Sep 22 23:21:37.801 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33498, task: repair
15931 Sep 22 23:21:37.801 INFO Using repair address: 127.0.0.1:33498, task: main
15932 Sep 22 23:21:37.801 INFO No SSL acceptor configured, task: main
15933 Sep 22 23:21:37.802 DEBG Read :1002 deps:[JobId(1001)] res:true
15934 Sep 22 23:21:37.802 INFO Upstairs starts
15935 Sep 22 23:21:37.802 INFO Crucible Version: BuildInfo {
15936 version: "0.0.1",
15937 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15938 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15939 git_branch: "main",
15940 rustc_semver: "1.70.0",
15941 rustc_channel: "stable",
15942 rustc_host_triple: "x86_64-unknown-illumos",
15943 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15944 cargo_triple: "x86_64-unknown-illumos",
15945 debug: true,
15946 opt_level: 0,
15947 }
15948 Sep 22 23:21:37.802 INFO Upstairs <-> Downstairs Message Version: 4
15949 Sep 22 23:21:37.802 DEBG Read :1002 deps:[JobId(1001)] res:true
15950 Sep 22 23:21:37.802 INFO Crucible stats registered with UUID: c27ba65a-3c7e-4977-b88b-9c74b48e942f
15951 Sep 22 23:21:37.802 INFO Crucible c27ba65a-3c7e-4977-b88b-9c74b48e942f has session id: 1e3ca93d-2e4e-478f-b33f-3a1688e22eed
15952 Sep 22 23:21:37.802 INFO UUID: 8d7b21f8-00cc-4c41-81e9-de537a7d24ce
15953 Sep 22 23:21:37.802 INFO Blocks per extent:5 Total Extents: 2
15954 Sep 22 23:21:37.803 INFO listening on 127.0.0.1:0, task: main
15955 Sep 22 23:21:37.803 INFO Crucible Version: Crucible Version: 0.0.1
15956 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15957 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15958 rustc: 1.70.0 stable x86_64-unknown-illumos
15959 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15960 Sep 22 23:21:37.803 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15961 Sep 22 23:21:37.803 INFO listening on 127.0.0.1:0, task: main
15962 Sep 22 23:21:37.803 INFO Using address: 127.0.0.1:46588, task: main
15963 Sep 22 23:21:37.803 INFO current number of open files limit 65536 is already the maximum
15964 Sep 22 23:21:37.803 INFO Opened existing region file "/tmp/downstairs-xy4dQl33/region.json"
15965 Sep 22 23:21:37.803 INFO listening on 127.0.0.1:0, task: main
15966 Sep 22 23:21:37.803 INFO current number of open files limit 65536 is already the maximum
15967 Sep 22 23:21:37.803 INFO Database read version 1
15968 Sep 22 23:21:37.803 INFO Database write version 1
15969 Sep 22 23:21:37.803 INFO Opened existing region file "/tmp/downstairs-PbY62a19/region.json"
15970 Sep 22 23:21:37.803 INFO Database read version 1
15971 Sep 22 23:21:37.803 INFO Database write version 1
15972 Sep 22 23:21:37.803 INFO [0] connecting to 127.0.0.1:49534, looper: 0
15973 Sep 22 23:21:37.803 INFO [1] connecting to 127.0.0.1:50238, looper: 1
15974 Sep 22 23:21:37.803 INFO Repair listens on 127.0.0.1:0, task: repair
15975 Sep 22 23:21:37.803 INFO [2] connecting to 127.0.0.1:64264, looper: 2
15976 Sep 22 23:21:37.803 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48398, task: repair
15977 Sep 22 23:21:37.803 INFO up_listen starts, task: up_listen
15978 Sep 22 23:21:37.803 INFO Wait for all three downstairs to come online
15979 Sep 22 23:21:37.803 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48398, task: repair
15980 Sep 22 23:21:37.803 INFO Flush timeout: 0.5
15981 Sep 22 23:21:37.803 INFO listening, local_addr: 127.0.0.1:48398, task: repair
15982 Sep 22 23:21:37.803 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48398, task: repair
15983 Sep 22 23:21:37.803 INFO Using repair address: 127.0.0.1:48398, task: main
15984 Sep 22 23:21:37.803 INFO No SSL acceptor configured, task: main
15985 Sep 22 23:21:37.803 INFO accepted connection from 127.0.0.1:63449, task: main
15986 Sep 22 23:21:37.804 INFO accepted connection from 127.0.0.1:42043, task: main
15987 Sep 22 23:21:37.804 INFO accepted connection from 127.0.0.1:62571, task: main
15988 Sep 22 23:21:37.804 INFO [0] c27ba65a-3c7e-4977-b88b-9c74b48e942f looper connected, looper: 0
15989 Sep 22 23:21:37.804 INFO [0] Proc runs for 127.0.0.1:49534 in state New
15990 Sep 22 23:21:37.804 INFO [1] c27ba65a-3c7e-4977-b88b-9c74b48e942f looper connected, looper: 1
15991 Sep 22 23:21:37.804 INFO [1] Proc runs for 127.0.0.1:50238 in state New
15992 Sep 22 23:21:37.804 INFO [2] c27ba65a-3c7e-4977-b88b-9c74b48e942f looper connected, looper: 2
15993 Sep 22 23:21:37.804 INFO [2] Proc runs for 127.0.0.1:64264 in state New
15994 Sep 22 23:21:37.804 INFO Connection request from c27ba65a-3c7e-4977-b88b-9c74b48e942f with version 4, task: proc
15995 Sep 22 23:21:37.804 INFO upstairs UpstairsConnection { upstairs_id: c27ba65a-3c7e-4977-b88b-9c74b48e942f, session_id: 046c4783-9946-4b0c-8eb5-0996fef4cd23, gen: 1 } connected, version 4, task: proc
15996 Sep 22 23:21:37.804 INFO Connection request from c27ba65a-3c7e-4977-b88b-9c74b48e942f with version 4, task: proc
15997 Sep 22 23:21:37.805 INFO Upstairs starts
15998 Sep 22 23:21:37.805 INFO upstairs UpstairsConnection { upstairs_id: c27ba65a-3c7e-4977-b88b-9c74b48e942f, session_id: 046c4783-9946-4b0c-8eb5-0996fef4cd23, gen: 1 } connected, version 4, task: proc
15999 Sep 22 23:21:37.805 INFO Crucible Version: BuildInfo {
16000 version: "0.0.1",
16001 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16002 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16003 git_branch: "main",
16004 rustc_semver: "1.70.0",
16005 rustc_channel: "stable",
16006 rustc_host_triple: "x86_64-unknown-illumos",
16007 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16008 cargo_triple: "x86_64-unknown-illumos",
16009 debug: true,
16010 opt_level: 0,
16011 }
16012 Sep 22 23:21:37.805 INFO Upstairs <-> Downstairs Message Version: 4
16013 Sep 22 23:21:37.805 INFO Crucible stats registered with UUID: 8a9244fd-a219-4407-95b8-2e8c6b37613e
16014 Sep 22 23:21:37.805 INFO Crucible 8a9244fd-a219-4407-95b8-2e8c6b37613e has session id: 05c4a641-4bef-4a5b-bbfa-45eb16265cac
16015 Sep 22 23:21:37.805 INFO Connection request from c27ba65a-3c7e-4977-b88b-9c74b48e942f with version 4, task: proc
16016 Sep 22 23:21:37.805 INFO upstairs UpstairsConnection { upstairs_id: c27ba65a-3c7e-4977-b88b-9c74b48e942f, session_id: 046c4783-9946-4b0c-8eb5-0996fef4cd23, gen: 1 } connected, version 4, task: proc
16017 The guest has requested activation
16018 Sep 22 23:21:37.805 INFO listening on 127.0.0.1:0, task: main
16019 Sep 22 23:21:37.805 INFO listening on 127.0.0.1:0, task: main
16020 Sep 22 23:21:37.805 INFO [0] c27ba65a-3c7e-4977-b88b-9c74b48e942f (046c4783-9946-4b0c-8eb5-0996fef4cd23) New New New ds_transition to WaitActive
16021 Sep 22 23:21:37.805 INFO [0] Transition from New to WaitActive
16022 Sep 22 23:21:37.805 INFO listening on 127.0.0.1:0, task: main
16023 Sep 22 23:21:37.805 INFO [0] connecting to 127.0.0.1:64153, looper: 0
16024 Sep 22 23:21:37.805 INFO [1] c27ba65a-3c7e-4977-b88b-9c74b48e942f (046c4783-9946-4b0c-8eb5-0996fef4cd23) WaitActive New New ds_transition to WaitActive
16025 Sep 22 23:21:37.805 DEBG [0] Read AckReady 1002, : downstairs
16026 Sep 22 23:21:37.805 INFO [1] connecting to 127.0.0.1:59973, looper: 1
16027 Sep 22 23:21:37.805 INFO [1] Transition from New to WaitActive
16028 Sep 22 23:21:37.805 INFO [2] c27ba65a-3c7e-4977-b88b-9c74b48e942f (046c4783-9946-4b0c-8eb5-0996fef4cd23) WaitActive WaitActive New ds_transition to WaitActive
16029 Sep 22 23:21:37.805 INFO [2] connecting to 127.0.0.1:46588, looper: 2
16030 Sep 22 23:21:37.805 INFO [2] Transition from New to WaitActive
16031 Sep 22 23:21:37.805 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f active request set
16032 Sep 22 23:21:37.805 INFO up_listen starts, task: up_listen
16033 Sep 22 23:21:37.805 INFO Wait for all three downstairs to come online
16034 Sep 22 23:21:37.805 INFO Flush timeout: 0.5
16035 Sep 22 23:21:37.805 INFO [0] received activate with gen 1
16036 Sep 22 23:21:37.806 INFO [0] client got ds_active_rx, promote! session 046c4783-9946-4b0c-8eb5-0996fef4cd23
16037 Sep 22 23:21:37.806 INFO [1] received activate with gen 1
16038 Sep 22 23:21:37.806 INFO [1] client got ds_active_rx, promote! session 046c4783-9946-4b0c-8eb5-0996fef4cd23
16039 Sep 22 23:21:37.806 INFO [2] received activate with gen 1
16040 Sep 22 23:21:37.806 INFO accepted connection from 127.0.0.1:58780, task: main
16041 Sep 22 23:21:37.806 INFO [2] client got ds_active_rx, promote! session 046c4783-9946-4b0c-8eb5-0996fef4cd23
16042 Sep 22 23:21:37.806 INFO accepted connection from 127.0.0.1:46822, task: main
16043 Sep 22 23:21:37.806 INFO UUID: 2a37b56b-50b2-4d7c-9cf7-bf9557fd6093
16044 Sep 22 23:21:37.806 INFO UUID: e05c70a3-48dd-4fb2-827c-e28061a0ccac
16045 Sep 22 23:21:37.806 INFO Blocks per extent:5 Total Extents: 2
16046 Sep 22 23:21:37.806 INFO accepted connection from 127.0.0.1:57952, task: main
16047 Sep 22 23:21:37.806 INFO Blocks per extent:5 Total Extents: 2
16048 Sep 22 23:21:37.806 INFO UpstairsConnection { upstairs_id: c27ba65a-3c7e-4977-b88b-9c74b48e942f, session_id: 046c4783-9946-4b0c-8eb5-0996fef4cd23, gen: 1 } is now active (read-write)
16049 Sep 22 23:21:37.806 INFO [0] 8a9244fd-a219-4407-95b8-2e8c6b37613e looper connected, looper: 0
16050 Sep 22 23:21:37.806 INFO Crucible Version: Crucible Version: 0.0.1
16051 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16052 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16053 rustc: 1.70.0 stable x86_64-unknown-illumos
16054 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16055 Sep 22 23:21:37.806 INFO [0] Proc runs for 127.0.0.1:64153 in state New
16056 Sep 22 23:21:37.806 INFO Crucible Version: Crucible Version: 0.0.1
16057 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16058 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16059 rustc: 1.70.0 stable x86_64-unknown-illumos
16060 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16061 Sep 22 23:21:37.806 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16062 Sep 22 23:21:37.806 INFO UpstairsConnection { upstairs_id: c27ba65a-3c7e-4977-b88b-9c74b48e942f, session_id: 046c4783-9946-4b0c-8eb5-0996fef4cd23, gen: 1 } is now active (read-write)
16063 Sep 22 23:21:37.806 INFO Using address: 127.0.0.1:39308, task: main
16064 Sep 22 23:21:37.806 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16065 Sep 22 23:21:37.806 INFO Using address: 127.0.0.1:55757, task: main
16066 Sep 22 23:21:37.806 DEBG up_ds_listen was notified
16067 Sep 22 23:21:37.806 INFO [1] 8a9244fd-a219-4407-95b8-2e8c6b37613e looper connected, looper: 1
16068 Sep 22 23:21:37.806 DEBG up_ds_listen process 1001
16069 Sep 22 23:21:37.806 INFO [1] Proc runs for 127.0.0.1:59973 in state New
16070 Sep 22 23:21:37.806 INFO UpstairsConnection { upstairs_id: c27ba65a-3c7e-4977-b88b-9c74b48e942f, session_id: 046c4783-9946-4b0c-8eb5-0996fef4cd23, gen: 1 } is now active (read-write)
16071 Sep 22 23:21:37.806 DEBG [A] ack job 1001:2, : downstairs
16072 Sep 22 23:21:37.806 DEBG up_ds_listen checked 1 jobs, back to waiting
16073 Sep 22 23:21:37.806 INFO [2] 8a9244fd-a219-4407-95b8-2e8c6b37613e looper connected, looper: 2
16074 Sep 22 23:21:37.806 INFO [2] Proc runs for 127.0.0.1:46588 in state New
16075 Sep 22 23:21:37.806 INFO Scrub at offset 10/10 sp:10
16076 Sep 22 23:21:37.806 INFO Repair listens on 127.0.0.1:0, task: repair
16077 Sep 22 23:21:37.806 INFO Repair listens on 127.0.0.1:0, task: repair
16078 Sep 22 23:21:37.806 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45242, task: repair
16079 Sep 22 23:21:37.806 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45242, task: repair
16080 Sep 22 23:21:37.806 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59077, task: repair
16081 Sep 22 23:21:37.807 INFO listening, local_addr: 127.0.0.1:45242, task: repair
16082 Sep 22 23:21:37.807 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59077, task: repair
16083 Sep 22 23:21:37.807 INFO Connection request from 8a9244fd-a219-4407-95b8-2e8c6b37613e with version 4, task: proc
16084 Sep 22 23:21:37.807 INFO listening, local_addr: 127.0.0.1:59077, task: repair
16085 Sep 22 23:21:37.807 INFO upstairs UpstairsConnection { upstairs_id: 8a9244fd-a219-4407-95b8-2e8c6b37613e, session_id: af1123f4-626a-46f8-a46e-4008ec56e922, gen: 1 } connected, version 4, task: proc
16086 Sep 22 23:21:37.807 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45242, task: repair
16087 Sep 22 23:21:37.807 INFO Using repair address: 127.0.0.1:45242, task: main
16088 Sep 22 23:21:37.807 INFO No SSL acceptor configured, task: main
16089 Sep 22 23:21:37.807 INFO [0] downstairs client at 127.0.0.1:49534 has UUID 8f0829a0-9286-4151-a390-4aadd7c97e80
16090 Sep 22 23:21:37.807 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8f0829a0-9286-4151-a390-4aadd7c97e80, encrypted: true, database_read_version: 1, database_write_version: 1 }
16091 Sep 22 23:21:37.807 INFO Connection request from 8a9244fd-a219-4407-95b8-2e8c6b37613e with version 4, task: proc
16092 Sep 22 23:21:37.807 INFO upstairs UpstairsConnection { upstairs_id: 8a9244fd-a219-4407-95b8-2e8c6b37613e, session_id: af1123f4-626a-46f8-a46e-4008ec56e922, gen: 1 } connected, version 4, task: proc
16093 Sep 22 23:21:37.807 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f WaitActive WaitActive WaitActive
16094 Sep 22 23:21:37.807 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59077, task: repair
16095 Sep 22 23:21:37.807 INFO Using repair address: 127.0.0.1:59077, task: main
16096 Sep 22 23:21:37.807 INFO No SSL acceptor configured, task: main
16097 Sep 22 23:21:37.807 INFO [1] downstairs client at 127.0.0.1:50238 has UUID bdfd9105-b870-40c3-a737-5c05b9c9e488
16098 Sep 22 23:21:37.807 INFO Connection request from 8a9244fd-a219-4407-95b8-2e8c6b37613e with version 4, task: proc
16099 Sep 22 23:21:37.807 INFO upstairs UpstairsConnection { upstairs_id: 8a9244fd-a219-4407-95b8-2e8c6b37613e, session_id: af1123f4-626a-46f8-a46e-4008ec56e922, gen: 1 } connected, version 4, task: proc
16100 Sep 22 23:21:37.807 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: bdfd9105-b870-40c3-a737-5c05b9c9e488, encrypted: true, database_read_version: 1, database_write_version: 1 }
16101 Sep 22 23:21:37.807 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f WaitActive WaitActive WaitActive
16102 Sep 22 23:21:37.807 INFO [2] downstairs client at 127.0.0.1:64264 has UUID 7f0f7567-102c-47f7-afe6-1eed37005958
16103 Sep 22 23:21:37.807 DEBG [1] Read already AckReady 1002, : downstairs
16104 Sep 22 23:21:37.807 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7f0f7567-102c-47f7-afe6-1eed37005958, encrypted: true, database_read_version: 1, database_write_version: 1 }
16105 The guest has requested activation
16106 Sep 22 23:21:37.807 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f WaitActive WaitActive WaitActive
16107 Sep 22 23:21:37.807 INFO Upstairs starts
16108 Sep 22 23:21:37.807 INFO [0] 8a9244fd-a219-4407-95b8-2e8c6b37613e (af1123f4-626a-46f8-a46e-4008ec56e922) New New New ds_transition to WaitActive
16109 Sep 22 23:21:37.807 INFO Crucible Version: BuildInfo {
16110 version: "0.0.1",
16111 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16112 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16113 git_branch: "main",
16114 rustc_semver: "1.70.0",
16115 rustc_channel: "stable",
16116 rustc_host_triple: "x86_64-unknown-illumos",
16117 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16118 cargo_triple: "x86_64-unknown-illumos",
16119 debug: true,
16120 opt_level: 0,
16121 }
16122 Sep 22 23:21:37.807 INFO [0] Transition from New to WaitActive
16123 Sep 22 23:21:37.807 INFO Upstairs <-> Downstairs Message Version: 4
16124 Sep 22 23:21:37.807 INFO Crucible stats registered with UUID: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98
16125 Sep 22 23:21:37.807 INFO Crucible bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 has session id: 44632fdd-5827-4865-829c-ea82538a3d81
16126 Sep 22 23:21:37.807 INFO [1] 8a9244fd-a219-4407-95b8-2e8c6b37613e (af1123f4-626a-46f8-a46e-4008ec56e922) WaitActive New New ds_transition to WaitActive
16127 Sep 22 23:21:37.807 INFO [1] Transition from New to WaitActive
16128 Sep 22 23:21:37.807 INFO current number of open files limit 65536 is already the maximum
16129 Sep 22 23:21:37.807 INFO Current flush_numbers [0..12]: [0, 0]
16130 Sep 22 23:21:37.807 INFO [2] 8a9244fd-a219-4407-95b8-2e8c6b37613e (af1123f4-626a-46f8-a46e-4008ec56e922) WaitActive WaitActive New ds_transition to WaitActive
16131 Sep 22 23:21:37.807 INFO [2] Transition from New to WaitActive
16132 Sep 22 23:21:37.807 INFO Created new region file "/tmp/downstairs-iBpIXgok/region.json"
16133 Sep 22 23:21:37.807 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e active request set
16134 Sep 22 23:21:37.807 INFO Scrub 996ff2d1-184c-4c18-9e4b-609bb90feb01 done in 0 seconds. Retries:0 scrub_size:5120 size:10 pause_milli:0
16135 Sep 22 23:21:37.807 INFO Downstairs has completed Negotiation, task: proc
16136 Sep 22 23:21:37.808 INFO listening on 127.0.0.1:0, task: main
16137 Sep 22 23:21:37.808 INFO [0] received activate with gen 1
16138 Sep 22 23:21:37.808 INFO [0] client got ds_active_rx, promote! session af1123f4-626a-46f8-a46e-4008ec56e922
16139 Sep 22 23:21:37.808 INFO listening on 127.0.0.1:0, task: main
16140 Sep 22 23:21:37.808 DEBG IO Flush 1002 has deps [JobId(1001), JobId(1000)]
16141 Sep 22 23:21:37.808 INFO listening on 127.0.0.1:0, task: main
16142 Sep 22 23:21:37.808 INFO [1] received activate with gen 1
16143 Sep 22 23:21:37.808 INFO [1] client got ds_active_rx, promote! session af1123f4-626a-46f8-a46e-4008ec56e922
16144 Sep 22 23:21:37.808 INFO [0] connecting to 127.0.0.1:33453, looper: 0
16145 Sep 22 23:21:37.808 INFO Current flush_numbers [0..12]: [0, 0]
16146 Sep 22 23:21:37.808 INFO [2] received activate with gen 1
16147 Sep 22 23:21:37.808 INFO [2] client got ds_active_rx, promote! session af1123f4-626a-46f8-a46e-4008ec56e922
16148 Sep 22 23:21:37.808 INFO [1] connecting to 127.0.0.1:36989, looper: 1
16149 Sep 22 23:21:37.808 INFO UpstairsConnection { upstairs_id: 8a9244fd-a219-4407-95b8-2e8c6b37613e, session_id: af1123f4-626a-46f8-a46e-4008ec56e922, gen: 1 } is now active (read-write)
16150 Sep 22 23:21:37.808 INFO [2] connecting to 127.0.0.1:55757, looper: 2
16151 Sep 22 23:21:37.808 INFO Downstairs has completed Negotiation, task: proc
16152 Sep 22 23:21:37.808 INFO UpstairsConnection { upstairs_id: 8a9244fd-a219-4407-95b8-2e8c6b37613e, session_id: af1123f4-626a-46f8-a46e-4008ec56e922, gen: 1 } is now active (read-write)
16153 Sep 22 23:21:37.808 INFO UpstairsConnection { upstairs_id: 8a9244fd-a219-4407-95b8-2e8c6b37613e, session_id: af1123f4-626a-46f8-a46e-4008ec56e922, gen: 1 } is now active (read-write)
16154 Sep 22 23:21:37.808 INFO up_listen starts, task: up_listen
16155 Sep 22 23:21:37.808 INFO Wait for all three downstairs to come online
16156 Sep 22 23:21:37.808 INFO Current flush_numbers [0..12]: [0, 0]
16157 Sep 22 23:21:37.808 INFO Flush timeout: 0.5
16158 Sep 22 23:21:37.808 INFO Downstairs has completed Negotiation, task: proc
16159 Sep 22 23:21:37.809 INFO [0] c27ba65a-3c7e-4977-b88b-9c74b48e942f (046c4783-9946-4b0c-8eb5-0996fef4cd23) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16160 Sep 22 23:21:37.809 INFO accepted connection from 127.0.0.1:37528, task: main
16161 Sep 22 23:21:37.809 INFO [0] Transition from WaitActive to WaitQuorum
16162 Sep 22 23:21:37.809 WARN [0] new RM replaced this: None
16163 Sep 22 23:21:37.809 INFO [0] downstairs client at 127.0.0.1:64153 has UUID 8ac2d079-494b-4dc0-bfb3-1771389b3218
16164 Sep 22 23:21:37.809 INFO [0] Starts reconcile loop
16165 Sep 22 23:21:37.809 INFO accepted connection from 127.0.0.1:39885, task: main
16166 Sep 22 23:21:37.809 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8ac2d079-494b-4dc0-bfb3-1771389b3218, encrypted: true, database_read_version: 1, database_write_version: 1 }
16167 Sep 22 23:21:37.809 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e WaitActive WaitActive WaitActive
16168 Sep 22 23:21:37.809 INFO [0] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 looper connected, looper: 0
16169 Sep 22 23:21:37.809 INFO [1] c27ba65a-3c7e-4977-b88b-9c74b48e942f (046c4783-9946-4b0c-8eb5-0996fef4cd23) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16170 Sep 22 23:21:37.809 INFO [1] Transition from WaitActive to WaitQuorum
16171 Sep 22 23:21:37.809 INFO [0] Proc runs for 127.0.0.1:33453 in state New
16172 Sep 22 23:21:37.809 DEBG [2] Read already AckReady 1002, : downstairs
16173 Sep 22 23:21:37.809 WARN [1] new RM replaced this: None
16174 Sep 22 23:21:37.809 INFO [1] downstairs client at 127.0.0.1:59973 has UUID 816d86cc-233d-41c9-991c-d98e4fdc6748
16175 Sep 22 23:21:37.809 INFO [1] Starts reconcile loop
16176 Sep 22 23:21:37.809 DEBG up_ds_listen was notified
16177 Sep 22 23:21:37.809 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 816d86cc-233d-41c9-991c-d98e4fdc6748, encrypted: true, database_read_version: 1, database_write_version: 1 }
16178 Sep 22 23:21:37.809 DEBG up_ds_listen process 1002
16179 Sep 22 23:21:37.809 DEBG [A] ack job 1002:3, : downstairs
16180 Sep 22 23:21:37.809 INFO accepted connection from 127.0.0.1:43288, task: main
16181 Sep 22 23:21:37.809 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e WaitActive WaitActive WaitActive
16182 Sep 22 23:21:37.809 INFO [2] c27ba65a-3c7e-4977-b88b-9c74b48e942f (046c4783-9946-4b0c-8eb5-0996fef4cd23) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
16183 Sep 22 23:21:37.809 INFO [2] Transition from WaitActive to WaitQuorum
16184 Sep 22 23:21:37.809 WARN [2] new RM replaced this: None
16185 Sep 22 23:21:37.809 INFO [2] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 looper connected, looper: 2
16186 Sep 22 23:21:37.809 INFO [2] Starts reconcile loop
16187 Sep 22 23:21:37.809 INFO [2] downstairs client at 127.0.0.1:46588 has UUID 8d7b21f8-00cc-4c41-81e9-de537a7d24ce
16188 Sep 22 23:21:37.809 INFO [2] Proc runs for 127.0.0.1:55757 in state New
16189 Sep 22 23:21:37.809 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8d7b21f8-00cc-4c41-81e9-de537a7d24ce, encrypted: true, database_read_version: 1, database_write_version: 1 }
16190 Sep 22 23:21:37.809 INFO [0] 127.0.0.1:49534 task reports connection:true
16191 Sep 22 23:21:37.809 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e WaitActive WaitActive WaitActive
16192 Sep 22 23:21:37.809 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f WaitQuorum WaitQuorum WaitQuorum
16193 Sep 22 23:21:37.809 INFO [1] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 looper connected, looper: 1
16194 Sep 22 23:21:37.809 INFO [0]R flush_numbers: [0, 0]
16195 Sep 22 23:21:37.809 INFO [0]R generation: [0, 0]
16196 Sep 22 23:21:37.809 INFO [1] Proc runs for 127.0.0.1:36989 in state New
16197 Sep 22 23:21:37.809 INFO [0]R dirty: [false, false]
16198 Sep 22 23:21:37.809 INFO [1]R flush_numbers: [0, 0]
16199 Sep 22 23:21:37.809 INFO [1]R generation: [0, 0]
16200 Sep 22 23:21:37.809 INFO [1]R dirty: [false, false]
16201 Sep 22 23:21:37.809 DEBG up_ds_listen checked 1 jobs, back to waiting
16202 Sep 22 23:21:37.809 INFO [2]R flush_numbers: [0, 0]
16203 Sep 22 23:21:37.809 INFO [2]R generation: [0, 0]
16204 Sep 22 23:21:37.809 INFO [2]R dirty: [false, false]
16205 Sep 22 23:21:37.809 INFO Max found gen is 1
16206 Sep 22 23:21:37.809 INFO Generation requested: 1 >= found:1
16207 Sep 22 23:21:37.809 INFO Next flush: 1
16208 Sep 22 23:21:37.809 INFO All extents match
16209 Sep 22 23:21:37.809 INFO No downstairs repair required
16210 Sep 22 23:21:37.809 INFO No initial repair work was required
16211 Sep 22 23:21:37.809 INFO Set Downstairs and Upstairs active
16212 Sep 22 23:21:37.809 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f is now active with session: 046c4783-9946-4b0c-8eb5-0996fef4cd23
16213 Sep 22 23:21:37.809 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f Set Active after no repair
16214 Sep 22 23:21:37.809 INFO Notify all downstairs, region set compare is done.
16215 Sep 22 23:21:37.809 INFO Set check for repair
16216 Sep 22 23:21:37.809 INFO [1] 127.0.0.1:50238 task reports connection:true
16217 Sep 22 23:21:37.810 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f Active Active Active
16218 Sep 22 23:21:37.810 INFO Current flush_numbers [0..12]: [0, 0]
16219 Sep 22 23:21:37.810 INFO Set check for repair
16220 Sep 22 23:21:37.810 INFO [2] 127.0.0.1:64264 task reports connection:true
16221 Sep 22 23:21:37.810 INFO c27ba65a-3c7e-4977-b88b-9c74b48e942f Active Active Active
16222 Sep 22 23:21:37.810 INFO Connection request from bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 with version 4, task: proc
16223 Sep 22 23:21:37.810 INFO Set check for repair
16224 Sep 22 23:21:37.810 INFO upstairs UpstairsConnection { upstairs_id: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98, session_id: 82d82a34-37ca-41ad-83f4-a1bf54c18aff, gen: 1 } connected, version 4, task: proc
16225 Sep 22 23:21:37.810 INFO [0] received reconcile message
16226 Sep 22 23:21:37.810 INFO Downstairs has completed Negotiation, task: proc
16227 Sep 22 23:21:37.810 INFO [0] All repairs completed, exit
16228 Sep 22 23:21:37.810 INFO Connection request from bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 with version 4, task: proc
16229 Sep 22 23:21:37.810 INFO [0] Starts cmd_loop
16230 Sep 22 23:21:37.810 INFO upstairs UpstairsConnection { upstairs_id: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98, session_id: 82d82a34-37ca-41ad-83f4-a1bf54c18aff, gen: 1 } connected, version 4, task: proc
16231 Sep 22 23:21:37.810 INFO [1] received reconcile message
16232 Sep 22 23:21:37.810 INFO Connection request from bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 with version 4, task: proc
16233 Sep 22 23:21:37.810 INFO [1] All repairs completed, exit
16234 Sep 22 23:21:37.810 INFO upstairs UpstairsConnection { upstairs_id: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98, session_id: 82d82a34-37ca-41ad-83f4-a1bf54c18aff, gen: 1 } connected, version 4, task: proc
16235 Sep 22 23:21:37.810 INFO [1] Starts cmd_loop
16236 Sep 22 23:21:37.810 INFO [2] received reconcile message
16237 Sep 22 23:21:37.810 INFO [2] All repairs completed, exit
16238 Sep 22 23:21:37.810 INFO [2] Starts cmd_loop
16239 The guest has requested activation
16240 The guest has finished waiting for activation
16241 Sep 22 23:21:37.810 INFO Current flush_numbers [0..12]: [0, 0]
16242 Sep 22 23:21:37.810 INFO [0] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 (82d82a34-37ca-41ad-83f4-a1bf54c18aff) New New New ds_transition to WaitActive
16243 Sep 22 23:21:37.810 INFO [0] Transition from New to WaitActive
16244 Sep 22 23:21:37.810 INFO [2] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 (82d82a34-37ca-41ad-83f4-a1bf54c18aff) WaitActive New New ds_transition to WaitActive
16245 Sep 22 23:21:37.810 INFO [2] Transition from New to WaitActive
16246 Sep 22 23:21:37.810 INFO Downstairs has completed Negotiation, task: proc
16247 Sep 22 23:21:37.810 INFO [1] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 (82d82a34-37ca-41ad-83f4-a1bf54c18aff) WaitActive New WaitActive ds_transition to WaitActive
16248 Sep 22 23:21:37.810 INFO [1] Transition from New to WaitActive
16249 Sep 22 23:21:37.810 DEBG IO Read 1000 has deps []
16250 Sep 22 23:21:37.810 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
16251 Sep 22 23:21:37.810 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 active request set
16252 Sep 22 23:21:37.811 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
16253 Sep 22 23:21:37.811 INFO [0] received activate with gen 1
16254 Sep 22 23:21:37.811 INFO [0] client got ds_active_rx, promote! session 82d82a34-37ca-41ad-83f4-a1bf54c18aff
16255 Sep 22 23:21:37.811 INFO [1] received activate with gen 1
16256 Sep 22 23:21:37.811 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
16257 Sep 22 23:21:37.811 INFO [1] client got ds_active_rx, promote! session 82d82a34-37ca-41ad-83f4-a1bf54c18aff
16258 Sep 22 23:21:37.811 INFO Current flush_numbers [0..12]: [0, 0]
16259 Sep 22 23:21:37.811 INFO [2] received activate with gen 1
16260 Sep 22 23:21:37.811 INFO [2] client got ds_active_rx, promote! session 82d82a34-37ca-41ad-83f4-a1bf54c18aff
16261 Sep 22 23:21:37.811 INFO UpstairsConnection { upstairs_id: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98, session_id: 82d82a34-37ca-41ad-83f4-a1bf54c18aff, gen: 1 } is now active (read-write)
16262 Sep 22 23:21:37.811 DEBG up_ds_listen was notified
16263 Sep 22 23:21:37.811 INFO UpstairsConnection { upstairs_id: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98, session_id: 82d82a34-37ca-41ad-83f4-a1bf54c18aff, gen: 1 } is now active (read-write)
16264 Sep 22 23:21:37.811 DEBG up_ds_listen process 1002
16265 Sep 22 23:21:37.811 INFO current number of open files limit 65536 is already the maximum
16266 Sep 22 23:21:37.811 DEBG [A] ack job 1002:3, : downstairs
16267 Sep 22 23:21:37.811 INFO Opened existing region file "/tmp/downstairs-iBpIXgok/region.json"
16268 Sep 22 23:21:37.811 INFO Database read version 1
16269 Sep 22 23:21:37.811 INFO Database write version 1
16270 Sep 22 23:21:37.811 INFO UpstairsConnection { upstairs_id: bc449dbb-4e8d-49d4-9ec4-c4ddf333de98, session_id: 82d82a34-37ca-41ad-83f4-a1bf54c18aff, gen: 1 } is now active (read-write)
16271 Sep 22 23:21:37.811 DEBG [rc] retire 1002 clears [JobId(1000), JobId(1001), JobId(1002)], : downstairs
16272 Sep 22 23:21:37.811 DEBG up_ds_listen checked 1 jobs, back to waiting
16273 Sep 22 23:21:37.811 INFO Downstairs has completed Negotiation, task: proc
16274 Sep 22 23:21:37.812 INFO [0] 8a9244fd-a219-4407-95b8-2e8c6b37613e (af1123f4-626a-46f8-a46e-4008ec56e922) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16275 Sep 22 23:21:37.812 INFO [0] Transition from WaitActive to WaitQuorum
16276 Sep 22 23:21:37.812 WARN [0] new RM replaced this: None
16277 Sep 22 23:21:37.812 INFO [0] Starts reconcile loop
16278 Sep 22 23:21:37.812 INFO [0] downstairs client at 127.0.0.1:33453 has UUID ed2b689d-1a8b-49a7-ad7b-f6874511d0ef
16279 Sep 22 23:21:37.812 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ed2b689d-1a8b-49a7-ad7b-f6874511d0ef, encrypted: true, database_read_version: 1, database_write_version: 1 }
16280 Sep 22 23:21:37.812 DEBG Read :1000 deps:[] res:true
16281 Sep 22 23:21:37.812 INFO [1] 8a9244fd-a219-4407-95b8-2e8c6b37613e (af1123f4-626a-46f8-a46e-4008ec56e922) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16282 Sep 22 23:21:37.812 INFO [1] Transition from WaitActive to WaitQuorum
16283 Sep 22 23:21:37.812 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 WaitActive WaitActive WaitActive
16284 Sep 22 23:21:37.812 WARN [1] new RM replaced this: None
16285 Sep 22 23:21:37.812 INFO [1] Starts reconcile loop
16286 Sep 22 23:21:37.812 INFO [2] downstairs client at 127.0.0.1:55757 has UUID e05c70a3-48dd-4fb2-827c-e28061a0ccac
16287 Sep 22 23:21:37.812 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e05c70a3-48dd-4fb2-827c-e28061a0ccac, encrypted: true, database_read_version: 1, database_write_version: 1 }
16288 Sep 22 23:21:37.812 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 WaitActive WaitActive WaitActive
16289 Sep 22 23:21:37.812 INFO [2] 8a9244fd-a219-4407-95b8-2e8c6b37613e (af1123f4-626a-46f8-a46e-4008ec56e922) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
16290 Sep 22 23:21:37.812 INFO [2] Transition from WaitActive to WaitQuorum
16291 Sep 22 23:21:37.812 WARN [2] new RM replaced this: None
16292 Sep 22 23:21:37.812 INFO [1] downstairs client at 127.0.0.1:36989 has UUID e251ad5f-6614-4ef0-b821-00d45953fcfc
16293 Sep 22 23:21:37.812 INFO [2] Starts reconcile loop
16294 Sep 22 23:21:37.812 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e251ad5f-6614-4ef0-b821-00d45953fcfc, encrypted: true, database_read_version: 1, database_write_version: 1 }
16295 Sep 22 23:21:37.812 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 WaitActive WaitActive WaitActive
16296 Sep 22 23:21:37.812 INFO [0] 127.0.0.1:64153 task reports connection:true
16297 Sep 22 23:21:37.812 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e WaitQuorum WaitQuorum WaitQuorum
16298 Sep 22 23:21:37.812 INFO [0]R flush_numbers: [0, 0]
16299 Sep 22 23:21:37.812 INFO [0]R generation: [0, 0]
16300 Sep 22 23:21:37.812 INFO [0]R dirty: [false, false]
16301 Sep 22 23:21:37.812 INFO [1]R flush_numbers: [0, 0]
16302 Sep 22 23:21:37.812 INFO [1]R generation: [0, 0]
16303 Sep 22 23:21:37.812 INFO [1]R dirty: [false, false]
16304 Sep 22 23:21:37.812 DEBG Read :1000 deps:[] res:true
16305 Sep 22 23:21:37.812 INFO Current flush_numbers [0..12]: [0, 0]
16306 Sep 22 23:21:37.812 INFO [2]R flush_numbers: [0, 0]
16307 Sep 22 23:21:37.812 INFO [2]R generation: [0, 0]
16308 Sep 22 23:21:37.812 INFO [2]R dirty: [false, false]
16309 Sep 22 23:21:37.812 INFO Max found gen is 1
16310 Sep 22 23:21:37.812 INFO Generation requested: 1 >= found:1
16311 Sep 22 23:21:37.812 INFO Next flush: 1
16312 Sep 22 23:21:37.812 INFO All extents match
16313 Sep 22 23:21:37.812 INFO No downstairs repair required
16314 Sep 22 23:21:37.812 INFO No initial repair work was required
16315 Sep 22 23:21:37.812 INFO Set Downstairs and Upstairs active
16316 Sep 22 23:21:37.812 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e is now active with session: af1123f4-626a-46f8-a46e-4008ec56e922
16317 Sep 22 23:21:37.812 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e Set Active after no repair
16318 Sep 22 23:21:37.812 INFO Notify all downstairs, region set compare is done.
16319 Sep 22 23:21:37.812 INFO Set check for repair
16320 Sep 22 23:21:37.812 INFO [1] 127.0.0.1:59973 task reports connection:true
16321 Sep 22 23:21:37.812 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e Active Active Active
16322 Sep 22 23:21:37.812 INFO Set check for repair
16323 Sep 22 23:21:37.812 INFO [2] 127.0.0.1:46588 task reports connection:true
16324 Sep 22 23:21:37.812 INFO 8a9244fd-a219-4407-95b8-2e8c6b37613e Active Active Active
16325 Sep 22 23:21:37.812 INFO Set check for repair
16326 Sep 22 23:21:37.813 INFO Downstairs has completed Negotiation, task: proc
16327 Sep 22 23:21:37.813 INFO [0] received reconcile message
16328 Sep 22 23:21:37.813 INFO [0] All repairs completed, exit
16329 Sep 22 23:21:37.813 INFO [0] Starts cmd_loop
16330 Sep 22 23:21:37.813 INFO [1] received reconcile message
16331 Sep 22 23:21:37.813 INFO [1] All repairs completed, exit
16332 Sep 22 23:21:37.813 INFO [1] Starts cmd_loop
16333 Sep 22 23:21:37.813 INFO [2] received reconcile message
16334 Sep 22 23:21:37.813 INFO Current flush_numbers [0..12]: [0, 0]
16335 Sep 22 23:21:37.813 INFO [2] All repairs completed, exit
16336 Sep 22 23:21:37.813 INFO [2] Starts cmd_loop
16337 Sep 22 23:21:37.813 DEBG Read :1000 deps:[] res:true
16338 The guest has finished waiting for activation
16339 Sep 22 23:21:37.813 INFO Downstairs has completed Negotiation, task: proc
16340 Sep 22 23:21:37.813 INFO UUID: 9278686c-4d89-49de-9739-9711102f9bee
16341 Sep 22 23:21:37.813 INFO Blocks per extent:5 Total Extents: 2
16342 Sep 22 23:21:37.813 INFO Current flush_numbers [0..12]: [0, 0]
16343 Sep 22 23:21:37.813 INFO Crucible Version: Crucible Version: 0.0.1
16344 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16345 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16346 rustc: 1.70.0 stable x86_64-unknown-illumos
16347 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16348 Sep 22 23:21:37.813 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16349 Sep 22 23:21:37.813 INFO Using address: 127.0.0.1:58397, task: main
16350 Sep 22 23:21:37.813 DEBG IO Read 1000 has deps []
16351 test test::integration_test_region ... ok
16352 Sep 22 23:21:37.814 INFO Downstairs has completed Negotiation, task: proc
16353 Sep 22 23:21:37.814 INFO Repair listens on 127.0.0.1:0, task: repair
16354 Sep 22 23:21:37.814 DEBG [0] Read AckReady 1000, : downstairs
16355 Sep 22 23:21:37.814 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64974, task: repair
16356 Sep 22 23:21:37.814 INFO [0] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 (82d82a34-37ca-41ad-83f4-a1bf54c18aff) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16357 Sep 22 23:21:37.814 INFO [0] Transition from WaitActive to WaitQuorum
16358 Sep 22 23:21:37.814 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64974, task: repair
16359 Sep 22 23:21:37.814 WARN [0] new RM replaced this: None
16360 Sep 22 23:21:37.814 INFO current number of open files limit 65536 is already the maximum
16361 Sep 22 23:21:37.814 INFO listening, local_addr: 127.0.0.1:64974, task: repair
16362 Sep 22 23:21:37.814 DEBG [1] Read already AckReady 1000, : downstairs
16363 Sep 22 23:21:37.814 INFO [0] Starts reconcile loop
16364 Sep 22 23:21:37.814 INFO Created new region file "/tmp/downstairs-bxfe8IV2/region.json"
16365 Sep 22 23:21:37.814 INFO [2] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 (82d82a34-37ca-41ad-83f4-a1bf54c18aff) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16366 Sep 22 23:21:37.814 INFO [2] Transition from WaitActive to WaitQuorum
16367 Sep 22 23:21:37.814 WARN [2] new RM replaced this: None
16368 Sep 22 23:21:37.814 DEBG [2] Read already AckReady 1000, : downstairs
16369 Sep 22 23:21:37.814 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64974, task: repair
16370 Sep 22 23:21:37.814 INFO [2] Starts reconcile loop
16371 Sep 22 23:21:37.814 DEBG up_ds_listen was notified
16372 Sep 22 23:21:37.814 INFO Using repair address: 127.0.0.1:64974, task: main
16373 Sep 22 23:21:37.814 DEBG up_ds_listen process 1000
16374 Sep 22 23:21:37.814 INFO No SSL acceptor configured, task: main
16375 Sep 22 23:21:37.814 DEBG [A] ack job 1000:1, : downstairs
16376 Sep 22 23:21:37.814 DEBG IO Write 1003 has deps []
16377 Sep 22 23:21:37.814 INFO [1] bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 (82d82a34-37ca-41ad-83f4-a1bf54c18aff) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
16378 Sep 22 23:21:37.814 INFO [1] Transition from WaitActive to WaitQuorum
16379 Sep 22 23:21:37.814 WARN [1] new RM replaced this: None
16380 Sep 22 23:21:37.814 DEBG Read :1000 deps:[] res:true
16381 Sep 22 23:21:37.814 INFO [1] Starts reconcile loop
16382 Sep 22 23:21:37.814 INFO [0] 127.0.0.1:33453 task reports connection:true
16383 Sep 22 23:21:37.814 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 WaitQuorum WaitQuorum WaitQuorum
16384 Sep 22 23:21:37.814 INFO [0]R flush_numbers: [0, 0]
16385 Sep 22 23:21:37.814 INFO [0]R generation: [0, 0]
16386 Sep 22 23:21:37.814 INFO [0]R dirty: [false, false]
16387 Sep 22 23:21:37.814 INFO [1]R flush_numbers: [0, 0]
16388 Sep 22 23:21:37.814 INFO [1]R generation: [0, 0]
16389 Sep 22 23:21:37.814 INFO [1]R dirty: [false, false]
16390 Sep 22 23:21:37.814 INFO [2]R flush_numbers: [0, 0]
16391 Sep 22 23:21:37.814 DEBG Read :1000 deps:[] res:true
16392 Sep 22 23:21:37.815 INFO [2]R generation: [0, 0]
16393 Sep 22 23:21:37.815 INFO [2]R dirty: [false, false]
16394 Sep 22 23:21:37.815 INFO Max found gen is 1
16395 Sep 22 23:21:37.815 INFO Generation requested: 1 >= found:1
16396 Sep 22 23:21:37.815 INFO Next flush: 1
16397 Sep 22 23:21:37.815 INFO All extents match
16398 Sep 22 23:21:37.815 INFO No downstairs repair required
16399 Sep 22 23:21:37.815 DEBG up_ds_listen checked 1 jobs, back to waiting
16400 Sep 22 23:21:37.815 INFO No initial repair work was required
16401 Sep 22 23:21:37.815 INFO Set Downstairs and Upstairs active
16402 Sep 22 23:21:37.815 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 is now active with session: 82d82a34-37ca-41ad-83f4-a1bf54c18aff
16403 Sep 22 23:21:37.815 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 Set Active after no repair
16404 Sep 22 23:21:37.815 DEBG Read :1000 deps:[] res:true
16405 Sep 22 23:21:37.815 INFO Notify all downstairs, region set compare is done.
16406 Sep 22 23:21:37.815 INFO Set check for repair
16407 Sep 22 23:21:37.815 INFO [2] 127.0.0.1:55757 task reports connection:true
16408 Sep 22 23:21:37.815 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 Active Active Active
16409 Sep 22 23:21:37.815 INFO Set check for repair
16410 Sep 22 23:21:37.815 INFO [1] 127.0.0.1:36989 task reports connection:true
16411 Sep 22 23:21:37.815 INFO bc449dbb-4e8d-49d4-9ec4-c4ddf333de98 Active Active Active
16412 Sep 22 23:21:37.815 INFO Set check for repair
16413 Sep 22 23:21:37.815 INFO [0] received reconcile message
16414 Sep 22 23:21:37.815 INFO [0] All repairs completed, exit
16415 Sep 22 23:21:37.815 INFO [0] Starts cmd_loop
16416 Sep 22 23:21:37.815 INFO Upstairs starts
16417 Sep 22 23:21:37.815 INFO Crucible Version: BuildInfo {
16418 version: "0.0.1",
16419 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16420 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16421 git_branch: "main",
16422 rustc_semver: "1.70.0",
16423 rustc_channel: "stable",
16424 rustc_host_triple: "x86_64-unknown-illumos",
16425 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16426 cargo_triple: "x86_64-unknown-illumos",
16427 debug: true,
16428 opt_level: 0,
16429 }
16430 Sep 22 23:21:37.815 INFO Upstairs <-> Downstairs Message Version: 4
16431 Sep 22 23:21:37.815 INFO Crucible stats registered with UUID: 1ba80387-235e-4d57-bd6a-ddde20bc88f5
16432 Sep 22 23:21:37.815 INFO [1] received reconcile message
16433 Sep 22 23:21:37.815 INFO [1] All repairs completed, exit
16434 Sep 22 23:21:37.815 INFO Crucible 1ba80387-235e-4d57-bd6a-ddde20bc88f5 has session id: 930575d8-984d-4b49-ae80-e9a9c3c00aea
16435 Sep 22 23:21:37.815 INFO [1] Starts cmd_loop
16436 Sep 22 23:21:37.815 INFO [2] received reconcile message
16437 Sep 22 23:21:37.815 DEBG [0] Read AckReady 1000, : downstairs
16438 Sep 22 23:21:37.816 INFO listening on 127.0.0.1:0, task: main
16439 Sep 22 23:21:37.816 INFO [2] All repairs completed, exit
16440 Sep 22 23:21:37.816 INFO [2] Starts cmd_loop
16441 Sep 22 23:21:37.816 INFO listening on 127.0.0.1:0, task: main
16442 Sep 22 23:21:37.816 INFO listening on 127.0.0.1:0, task: main
16443 The guest has finished waiting for activation
16444 Sep 22 23:21:37.816 DEBG [1] Read already AckReady 1000, : downstairs
16445 Sep 22 23:21:37.816 INFO [0] connecting to 127.0.0.1:38477, looper: 0
16446 Sep 22 23:21:37.816 DEBG [2] Read already AckReady 1000, : downstairs
16447 Sep 22 23:21:37.816 INFO [1] connecting to 127.0.0.1:39308, looper: 1
16448 Sep 22 23:21:37.816 DEBG up_ds_listen was notified
16449 Sep 22 23:21:37.816 DEBG up_ds_listen process 1000
16450 Sep 22 23:21:37.816 DEBG [A] ack job 1000:1, : downstairs
16451 Sep 22 23:21:37.816 INFO [2] connecting to 127.0.0.1:58397, looper: 2
16452 Sep 22 23:21:37.816 INFO up_listen starts, task: up_listen
16453 Sep 22 23:21:37.816 INFO Wait for all three downstairs to come online
16454 Sep 22 23:21:37.816 INFO Flush timeout: 0.5
16455 Sep 22 23:21:37.816 DEBG up_ds_listen checked 1 jobs, back to waiting
16456 Sep 22 23:21:37.816 DEBG IO Write 1000 has deps []
16457 Sep 22 23:21:37.816 INFO accepted connection from 127.0.0.1:51609, task: main
16458 Sep 22 23:21:37.816 DEBG IO Write 1001 has deps [JobId(1000)]
16459 Sep 22 23:21:37.816 DEBG up_ds_listen was notified
16460 Sep 22 23:21:37.816 DEBG up_ds_listen process 1000
16461 Sep 22 23:21:37.816 DEBG [A] ack job 1000:1, : downstairs
16462 Sep 22 23:21:37.816 INFO accepted connection from 127.0.0.1:50744, task: main
16463 Sep 22 23:21:37.816 DEBG up_ds_listen was notified
16464 Sep 22 23:21:37.817 DEBG up_ds_listen process 1001
16465 Sep 22 23:21:37.817 DEBG up_ds_listen checked 1 jobs, back to waiting
16466 Sep 22 23:21:37.817 DEBG [A] ack job 1001:2, : downstairs
16467 Sep 22 23:21:37.817 INFO [0] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 looper connected, looper: 0
16468 Sep 22 23:21:37.817 DEBG up_ds_listen checked 1 jobs, back to waiting
16469 Sep 22 23:21:37.817 INFO [0] Proc runs for 127.0.0.1:38477 in state New
16470 Sep 22 23:21:37.817 INFO accepted connection from 127.0.0.1:64280, task: main
16471 Sep 22 23:21:37.817 INFO [2] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 looper connected, looper: 2
16472 Sep 22 23:21:37.817 INFO [2] Proc runs for 127.0.0.1:58397 in state New
16473 Sep 22 23:21:37.817 INFO [1] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 looper connected, looper: 1
16474 Sep 22 23:21:37.817 INFO [1] Proc runs for 127.0.0.1:39308 in state New
16475 Sep 22 23:21:37.817 DEBG up_ds_listen was notified
16476 Sep 22 23:21:37.817 DEBG up_ds_listen process 1003
16477 Sep 22 23:21:37.817 DEBG [A] ack job 1003:4, : downstairs
16478 Sep 22 23:21:37.817 DEBG IO Read 1001 has deps []
16479 Sep 22 23:21:37.817 DEBG up_ds_listen checked 1 jobs, back to waiting
16480 Sep 22 23:21:37.817 INFO Scrub check for 75f46847-6259-4674-b0dd-3bfaab313ce5
16481 Sep 22 23:21:37.817 INFO Scrub for 75f46847-6259-4674-b0dd-3bfaab313ce5 not required
16482 Sep 22 23:21:37.817 INFO Connection request from 1ba80387-235e-4d57-bd6a-ddde20bc88f5 with version 4, task: proc
16483 Sep 22 23:21:37.817 DEBG IO Read 1004 has deps [JobId(1003)]
16484 Sep 22 23:21:37.817 INFO upstairs UpstairsConnection { upstairs_id: 1ba80387-235e-4d57-bd6a-ddde20bc88f5, session_id: 225d651b-211c-4da3-aced-2077e6a64214, gen: 1 } connected, version 4, task: proc
16485 Sep 22 23:21:37.817 INFO Connection request from 1ba80387-235e-4d57-bd6a-ddde20bc88f5 with version 4, task: proc
16486 Sep 22 23:21:37.817 INFO upstairs UpstairsConnection { upstairs_id: 1ba80387-235e-4d57-bd6a-ddde20bc88f5, session_id: 225d651b-211c-4da3-aced-2077e6a64214, gen: 1 } connected, version 4, task: proc
16487 Sep 22 23:21:37.818 INFO Connection request from 1ba80387-235e-4d57-bd6a-ddde20bc88f5 with version 4, task: proc
16488 Sep 22 23:21:37.818 INFO upstairs UpstairsConnection { upstairs_id: 1ba80387-235e-4d57-bd6a-ddde20bc88f5, session_id: 225d651b-211c-4da3-aced-2077e6a64214, gen: 1 } connected, version 4, task: proc
16489 Sep 22 23:21:37.818 INFO current number of open files limit 65536 is already the maximum
16490 Sep 22 23:21:37.818 INFO Opened existing region file "/tmp/downstairs-bxfe8IV2/region.json"
16491 Sep 22 23:21:37.818 INFO Database read version 1
16492 Sep 22 23:21:37.818 INFO Database write version 1
16493 The guest has requested activation
16494 Sep 22 23:21:37.818 DEBG Write :1000 deps:[] res:true
16495 Sep 22 23:21:37.818 INFO [0] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 (225d651b-211c-4da3-aced-2077e6a64214) New New New ds_transition to WaitActive
16496 Sep 22 23:21:37.818 DEBG Read :1001 deps:[] res:true
16497 Sep 22 23:21:37.818 INFO [0] Transition from New to WaitActive
16498 Sep 22 23:21:37.818 DEBG Read :1004 deps:[JobId(1003)] res:true
16499 Sep 22 23:21:37.818 INFO [2] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 (225d651b-211c-4da3-aced-2077e6a64214) WaitActive New New ds_transition to WaitActive
16500 Sep 22 23:21:37.818 INFO [2] Transition from New to WaitActive
16501 Sep 22 23:21:37.818 INFO [1] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 (225d651b-211c-4da3-aced-2077e6a64214) WaitActive New WaitActive ds_transition to WaitActive
16502 Sep 22 23:21:37.818 INFO [1] Transition from New to WaitActive
16503 Sep 22 23:21:37.818 DEBG Read :1001 deps:[] res:true
16504 Sep 22 23:21:37.818 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 active request set
16505 Sep 22 23:21:37.818 INFO [0] received activate with gen 1
16506 Sep 22 23:21:37.818 DEBG Read :1004 deps:[JobId(1003)] res:true
16507 Sep 22 23:21:37.818 INFO [0] client got ds_active_rx, promote! session 225d651b-211c-4da3-aced-2077e6a64214
16508 Sep 22 23:21:37.818 DEBG Write :1000 deps:[] res:true
16509 Sep 22 23:21:37.818 INFO [1] received activate with gen 1
16510 Sep 22 23:21:37.818 DEBG Read :1001 deps:[] res:true
16511 Sep 22 23:21:37.818 INFO [1] client got ds_active_rx, promote! session 225d651b-211c-4da3-aced-2077e6a64214
16512 Sep 22 23:21:37.818 INFO [2] received activate with gen 1
16513 Sep 22 23:21:37.818 INFO [2] client got ds_active_rx, promote! session 225d651b-211c-4da3-aced-2077e6a64214
16514 Sep 22 23:21:37.819 DEBG Read :1004 deps:[JobId(1003)] res:true
16515 Sep 22 23:21:37.819 INFO UpstairsConnection { upstairs_id: 1ba80387-235e-4d57-bd6a-ddde20bc88f5, session_id: 225d651b-211c-4da3-aced-2077e6a64214, gen: 1 } is now active (read-write)
16516 Sep 22 23:21:37.819 DEBG Write :1000 deps:[] res:true
16517 Sep 22 23:21:37.819 DEBG Write :1001 deps:[JobId(1000)] res:true
16518 Sep 22 23:21:37.819 INFO UpstairsConnection { upstairs_id: 1ba80387-235e-4d57-bd6a-ddde20bc88f5, session_id: 225d651b-211c-4da3-aced-2077e6a64214, gen: 1 } is now active (read-write)
16519 Sep 22 23:21:37.819 INFO UpstairsConnection { upstairs_id: 1ba80387-235e-4d57-bd6a-ddde20bc88f5, session_id: 225d651b-211c-4da3-aced-2077e6a64214, gen: 1 } is now active (read-write)
16520 Sep 22 23:21:37.819 DEBG [0] Read AckReady 1001, : downstairs
16521 Sep 22 23:21:37.819 DEBG Write :1001 deps:[JobId(1000)] res:true
16522 Sep 22 23:21:37.819 DEBG [1] Read already AckReady 1001, : downstairs
16523 Sep 22 23:21:37.819 DEBG IO Write 1001 has deps []
16524 Sep 22 23:21:37.819 INFO [0] downstairs client at 127.0.0.1:38477 has UUID 225348bb-ea71-4678-9320-8b37dbfcd881
16525 Sep 22 23:21:37.819 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 225348bb-ea71-4678-9320-8b37dbfcd881, encrypted: true, database_read_version: 1, database_write_version: 1 }
16526 Sep 22 23:21:37.819 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 WaitActive WaitActive WaitActive
16527 Sep 22 23:21:37.819 DEBG [2] Read already AckReady 1001, : downstairs
16528 Sep 22 23:21:37.819 INFO [2] downstairs client at 127.0.0.1:58397 has UUID 9278686c-4d89-49de-9739-9711102f9bee
16529 Sep 22 23:21:37.819 DEBG up_ds_listen was notified
16530 Sep 22 23:21:37.819 DEBG up_ds_listen was notified
16531 Sep 22 23:21:37.820 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9278686c-4d89-49de-9739-9711102f9bee, encrypted: true, database_read_version: 1, database_write_version: 1 }
16532 Sep 22 23:21:37.820 DEBG up_ds_listen process 1001
16533 Sep 22 23:21:37.820 DEBG up_ds_listen process 1001
16534 Sep 22 23:21:37.820 DEBG [A] ack job 1001:2, : downstairs
16535 Sep 22 23:21:37.820 DEBG [A] ack job 1001:2, : downstairs
16536 Sep 22 23:21:37.820 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 WaitActive WaitActive WaitActive
16537 Sep 22 23:21:37.820 DEBG up_ds_listen checked 1 jobs, back to waiting
16538 Sep 22 23:21:37.820 DEBG Write :1001 deps:[JobId(1000)] res:true
16539 Sep 22 23:21:37.820 INFO [1] downstairs client at 127.0.0.1:39308 has UUID 2a37b56b-50b2-4d7c-9cf7-bf9557fd6093
16540 Sep 22 23:21:37.820 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2a37b56b-50b2-4d7c-9cf7-bf9557fd6093, encrypted: true, database_read_version: 1, database_write_version: 1 }
16541 Sep 22 23:21:37.820 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 WaitActive WaitActive WaitActive
16542 Sep 22 23:21:37.820 DEBG up_ds_listen checked 1 jobs, back to waiting
16543 Sep 22 23:21:37.820 INFO Scrub check for 507208f9-64c5-4508-b304-2a8e206b5aec
16544 Sep 22 23:21:37.820 DEBG IO Read 1002 has deps [JobId(1001)]
16545 Sep 22 23:21:37.820 INFO Scrub for 507208f9-64c5-4508-b304-2a8e206b5aec begins
16546 Sep 22 23:21:37.820 INFO Scrub with total_size:2560 block_size:512
16547 Sep 22 23:21:37.820 INFO Scrub check for 09bdd5d2-3b23-4357-8a7e-90e2cd35ea46
16548 Sep 22 23:21:37.820 INFO Scrubs from block 0 to 5 in (256) 131072 size IOs pm:0
16549 Sep 22 23:21:37.820 INFO Adjust block_count to 5 at offset 0
16550 Sep 22 23:21:37.820 INFO Current flush_numbers [0..12]: [0, 0]
16551 Sep 22 23:21:37.820 INFO Scrub for 09bdd5d2-3b23-4357-8a7e-90e2cd35ea46 begins
16552 Sep 22 23:21:37.820 INFO Scrub with total_size:2560 block_size:512
16553 Sep 22 23:21:37.820 INFO Scrubs from block 0 to 5 in (256) 131072 size IOs pm:0
16554 Sep 22 23:21:37.820 INFO Adjust block_count to 5 at offset 0
16555 Sep 22 23:21:37.820 INFO UUID: f5ea863d-974b-4712-8f07-7666c185979b
16556 Sep 22 23:21:37.820 INFO Blocks per extent:5 Total Extents: 2
16557 Sep 22 23:21:37.820 INFO Downstairs has completed Negotiation, task: proc
16558 Sep 22 23:21:37.820 INFO Crucible Version: Crucible Version: 0.0.1
16559 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16560 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16561 rustc: 1.70.0 stable x86_64-unknown-illumos
16562 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16563 Sep 22 23:21:37.820 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16564 Sep 22 23:21:37.820 INFO Using address: 127.0.0.1:65137, task: main
16565 Sep 22 23:21:37.820 INFO Current flush_numbers [0..12]: [0, 0]
16566 Sep 22 23:21:37.821 INFO Downstairs has completed Negotiation, task: proc
16567 Sep 22 23:21:37.821 INFO Repair listens on 127.0.0.1:0, task: repair
16568 Sep 22 23:21:37.821 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53949, task: repair
16569 Sep 22 23:21:37.821 DEBG Read :1002 deps:[JobId(1001)] res:true
16570 Sep 22 23:21:37.821 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53949, task: repair
16571 Sep 22 23:21:37.821 INFO Current flush_numbers [0..12]: [0, 0]
16572 Sep 22 23:21:37.821 INFO listening, local_addr: 127.0.0.1:53949, task: repair
16573 Sep 22 23:21:37.821 INFO Downstairs has completed Negotiation, task: proc
16574 Sep 22 23:21:37.821 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53949, task: repair
16575 Sep 22 23:21:37.821 INFO Using repair address: 127.0.0.1:53949, task: main
16576 Sep 22 23:21:37.821 INFO No SSL acceptor configured, task: main
16577 Sep 22 23:21:37.821 DEBG Read :1002 deps:[JobId(1001)] res:true
16578 Sep 22 23:21:37.821 INFO [0] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 (225d651b-211c-4da3-aced-2077e6a64214) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16579 Sep 22 23:21:37.821 INFO [0] Transition from WaitActive to WaitQuorum
16580 Sep 22 23:21:37.821 WARN [0] new RM replaced this: None
16581 Sep 22 23:21:37.821 DEBG Write :1001 deps:[] res:true
16582 Sep 22 23:21:37.821 INFO [0] Starts reconcile loop
16583 Sep 22 23:21:37.821 INFO [2] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 (225d651b-211c-4da3-aced-2077e6a64214) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16584 Sep 22 23:21:37.821 DEBG [0] Read AckReady 1004, : downstairs
16585 Sep 22 23:21:37.821 INFO [2] Transition from WaitActive to WaitQuorum
16586 Sep 22 23:21:37.821 WARN [2] new RM replaced this: None
16587 Sep 22 23:21:37.821 INFO current number of open files limit 65536 is already the maximum
16588 Sep 22 23:21:37.821 INFO [2] Starts reconcile loop
16589 Sep 22 23:21:37.821 DEBG Read :1002 deps:[JobId(1001)] res:true
16590 Sep 22 23:21:37.822 INFO [1] 1ba80387-235e-4d57-bd6a-ddde20bc88f5 (225d651b-211c-4da3-aced-2077e6a64214) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
16591 Sep 22 23:21:37.822 INFO [1] Transition from WaitActive to WaitQuorum
16592 Sep 22 23:21:37.822 WARN [1] new RM replaced this: None
16593 Sep 22 23:21:37.822 INFO Created new region file "/tmp/downstairs-RQXy3Y00/region.json"
16594 Sep 22 23:21:37.822 INFO [1] Starts reconcile loop
16595 Sep 22 23:21:37.822 INFO [0] 127.0.0.1:38477 task reports connection:true
16596 Sep 22 23:21:37.822 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 WaitQuorum WaitQuorum WaitQuorum
16597 Sep 22 23:21:37.822 INFO [0]R flush_numbers: [0, 0]
16598 Sep 22 23:21:37.822 INFO [0]R generation: [0, 0]
16599 Sep 22 23:21:37.822 INFO [0]R dirty: [false, false]
16600 Sep 22 23:21:37.822 DEBG Write :1001 deps:[] res:true
16601 Sep 22 23:21:37.822 INFO [1]R flush_numbers: [0, 0]
16602 Sep 22 23:21:37.822 INFO [1]R generation: [0, 0]
16603 Sep 22 23:21:37.822 INFO [1]R dirty: [false, false]
16604 Sep 22 23:21:37.822 INFO [2]R flush_numbers: [0, 0]
16605 Sep 22 23:21:37.822 INFO [2]R generation: [0, 0]
16606 Sep 22 23:21:37.822 INFO [2]R dirty: [false, false]
16607 Sep 22 23:21:37.822 INFO Max found gen is 1
16608 Sep 22 23:21:37.822 INFO Generation requested: 1 >= found:1
16609 Sep 22 23:21:37.822 INFO Next flush: 1
16610 Sep 22 23:21:37.822 DEBG IO Write 1002 has deps [JobId(1000)]
16611 Sep 22 23:21:37.822 INFO All extents match
16612 Sep 22 23:21:37.822 INFO No downstairs repair required
16613 Sep 22 23:21:37.822 INFO No initial repair work was required
16614 Sep 22 23:21:37.822 INFO Set Downstairs and Upstairs active
16615 Sep 22 23:21:37.822 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 is now active with session: 225d651b-211c-4da3-aced-2077e6a64214
16616 Sep 22 23:21:37.822 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 Set Active after no repair
16617 Sep 22 23:21:37.822 INFO Notify all downstairs, region set compare is done.
16618 Sep 22 23:21:37.822 INFO Set check for repair
16619 Sep 22 23:21:37.822 INFO [2] 127.0.0.1:58397 task reports connection:true
16620 Sep 22 23:21:37.822 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 Active Active Active
16621 Sep 22 23:21:37.822 INFO Set check for repair
16622 Sep 22 23:21:37.822 INFO [1] 127.0.0.1:39308 task reports connection:true
16623 Sep 22 23:21:37.822 INFO 1ba80387-235e-4d57-bd6a-ddde20bc88f5 Active Active Active
16624 Sep 22 23:21:37.822 INFO Set check for repair
16625 Sep 22 23:21:37.822 DEBG Write :1001 deps:[] res:true
16626 Sep 22 23:21:37.822 INFO [0] received reconcile message
16627 Sep 22 23:21:37.822 INFO [0] All repairs completed, exit
16628 Sep 22 23:21:37.822 INFO [0] Starts cmd_loop
16629 Sep 22 23:21:37.822 INFO [1] received reconcile message
16630 Sep 22 23:21:37.823 INFO [1] All repairs completed, exit
16631 Sep 22 23:21:37.823 INFO [1] Starts cmd_loop
16632 Sep 22 23:21:37.823 INFO [2] received reconcile message
16633 Sep 22 23:21:37.823 INFO [2] All repairs completed, exit
16634 Sep 22 23:21:37.823 INFO [2] Starts cmd_loop
16635 The guest has finished waiting for activation
16636 Sep 22 23:21:37.823 DEBG [1] Read already AckReady 1004, : downstairs
16637 Sep 22 23:21:37.823 DEBG IO Read 1000 has deps []
16638 Sep 22 23:21:37.824 DEBG IO Write 1002 has deps [JobId(1000)]
16639 Sep 22 23:21:37.824 DEBG [0] Read AckReady 1002, : downstairs
16640 Sep 22 23:21:37.824 DEBG Read :1000 deps:[] res:true
16641 Sep 22 23:21:37.825 DEBG Read :1000 deps:[] res:true
16642 Sep 22 23:21:37.825 DEBG [2] Read already AckReady 1004, : downstairs
16643 Sep 22 23:21:37.825 DEBG up_ds_listen was notified
16644 Sep 22 23:21:37.825 DEBG up_ds_listen process 1004
16645 Sep 22 23:21:37.825 INFO current number of open files limit 65536 is already the maximum
16646 Sep 22 23:21:37.825 DEBG [A] ack job 1004:5, : downstairs
16647 Sep 22 23:21:37.825 INFO Opened existing region file "/tmp/downstairs-RQXy3Y00/region.json"
16648 Sep 22 23:21:37.825 INFO Database read version 1
16649 Sep 22 23:21:37.825 INFO Database write version 1
16650 Sep 22 23:21:37.825 DEBG Read :1000 deps:[] res:true
16651 Sep 22 23:21:37.825 DEBG up_ds_listen was notified
16652 Sep 22 23:21:37.825 DEBG up_ds_listen process 1002
16653 Sep 22 23:21:37.825 DEBG up_ds_listen checked 1 jobs, back to waiting
16654 Sep 22 23:21:37.825 DEBG [A] ack job 1002:3, : downstairs
16655 Sep 22 23:21:37.825 DEBG up_ds_listen checked 1 jobs, back to waiting
16656 Sep 22 23:21:37.826 INFO Scrub at offset 5/5 sp:5
16657 Sep 22 23:21:37.826 DEBG [1] Read already AckReady 1002, : downstairs
16658 Sep 22 23:21:37.826 DEBG [0] Read AckReady 1000, : downstairs
16659 Sep 22 23:21:37.827 DEBG [2] Read already AckReady 1000, : downstairs
16660 Sep 22 23:21:37.827 DEBG [2] Read already AckReady 1002, : downstairs
16661 Sep 22 23:21:37.827 INFO Scrub 507208f9-64c5-4508-b304-2a8e206b5aec done in 0 seconds. Retries:0 scrub_size:2560 size:5 pause_milli:0
16662 Sep 22 23:21:37.827 DEBG up_ds_listen was notified
16663 Sep 22 23:21:37.827 DEBG up_ds_listen process 1002
16664 Sep 22 23:21:37.827 DEBG [1] Read already AckReady 1000, : downstairs
16665 Sep 22 23:21:37.827 DEBG IO Flush 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
16666 Sep 22 23:21:37.827 DEBG [A] ack job 1002:3, : downstairs
16667 Sep 22 23:21:37.827 DEBG up_ds_listen was notified
16668 Sep 22 23:21:37.827 DEBG up_ds_listen process 1000
16669 Sep 22 23:21:37.827 DEBG [A] ack job 1000:1, : downstairs
16670 Sep 22 23:21:37.827 DEBG up_ds_listen checked 1 jobs, back to waiting
16671 Sep 22 23:21:37.827 INFO UUID: a1447c8e-cf80-4bea-b730-010907ef0c7f
16672 Sep 22 23:21:37.827 DEBG up_ds_listen was notified
16673 Sep 22 23:21:37.827 DEBG up_ds_listen process 1002
16674 Sep 22 23:21:37.827 DEBG up_ds_listen checked 1 jobs, back to waiting
16675 Sep 22 23:21:37.828 INFO Blocks per extent:5 Total Extents: 2
16676 Sep 22 23:21:37.828 DEBG [A] ack job 1002:3, : downstairs
16677 Sep 22 23:21:37.828 DEBG up_ds_listen checked 1 jobs, back to waiting
16678 Sep 22 23:21:37.828 INFO Crucible Version: Crucible Version: 0.0.1
16679 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16680 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16681 rustc: 1.70.0 stable x86_64-unknown-illumos
16682 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16683 Sep 22 23:21:37.828 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16684 Sep 22 23:21:37.828 INFO Using address: 127.0.0.1:44628, task: main
16685 Sep 22 23:21:37.828 INFO Scrub at offset 5/5 sp:5
16686 Sep 22 23:21:37.828 INFO Repair listens on 127.0.0.1:0, task: repair
16687 Sep 22 23:21:37.828 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42546, task: repair
16688 Sep 22 23:21:37.828 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42546, task: repair
16689 Sep 22 23:21:37.828 INFO listening, local_addr: 127.0.0.1:42546, task: repair
16690 Sep 22 23:21:37.828 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42546, task: repair
16691 Sep 22 23:21:37.828 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16692 Sep 22 23:21:37.828 INFO Using repair address: 127.0.0.1:42546, task: main
16693 Sep 22 23:21:37.828 INFO No SSL acceptor configured, task: main
16694 test test::integration_test_scrub ... ok
16695 Sep 22 23:21:37.828 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16696 Sep 22 23:21:37.829 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16697 Sep 22 23:21:37.829 INFO current number of open files limit 65536 is already the maximum
16698 Sep 22 23:21:37.829 INFO Created new region file "/tmp/downstairs-agB7RjDz/region.json"
16699 Sep 22 23:21:37.829 INFO current number of open files limit 65536 is already the maximum
16700 Sep 22 23:21:37.829 INFO Created new region file "/tmp/downstairs-FzhHxHPv/region.json"
16701 Sep 22 23:21:37.829 DEBG up_ds_listen was notified
16702 Sep 22 23:21:37.829 DEBG up_ds_listen process 1003
16703 Sep 22 23:21:37.829 DEBG [A] ack job 1003:4, : downstairs
16704 Sep 22 23:21:37.829 INFO Scrub 09bdd5d2-3b23-4357-8a7e-90e2cd35ea46 done in 0 seconds. Retries:0 scrub_size:2560 size:5 pause_milli:0
16705 Sep 22 23:21:37.829 DEBG [rc] retire 1003 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003)], : downstairs
16706 Sep 22 23:21:37.829 DEBG up_ds_listen checked 1 jobs, back to waiting
16707 Sep 22 23:21:37.829 DEBG IO Flush 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
16708 test test::integration_test_scrub_no_rop ... ok
16709 Sep 22 23:21:37.832 INFO current number of open files limit 65536 is already the maximum
16710 Sep 22 23:21:37.832 INFO Created new region file "/tmp/downstairs-UTqRCVgq/region.json"
16711 Sep 22 23:21:37.832 DEBG IO Write 1001 has deps [JobId(1000)]
16712 Sep 22 23:21:37.832 DEBG IO Write 1004 has deps []
16713 Sep 22 23:21:37.832 DEBG up_ds_listen was notified
16714 Sep 22 23:21:37.832 DEBG up_ds_listen process 1001
16715 Sep 22 23:21:37.832 DEBG [A] ack job 1001:2, : downstairs
16716 Sep 22 23:21:37.832 DEBG up_ds_listen checked 1 jobs, back to waiting
16717 Sep 22 23:21:37.832 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16718 Sep 22 23:21:37.833 INFO current number of open files limit 65536 is already the maximum
16719 Sep 22 23:21:37.833 INFO Opened existing region file "/tmp/downstairs-FzhHxHPv/region.json"
16720 Sep 22 23:21:37.833 INFO Database read version 1
16721 Sep 22 23:21:37.833 INFO Database write version 1
16722 Sep 22 23:21:37.833 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16723 Sep 22 23:21:37.833 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16724 Sep 22 23:21:37.833 DEBG up_ds_listen was notified
16725 Sep 22 23:21:37.833 DEBG up_ds_listen process 1003
16726 Sep 22 23:21:37.833 DEBG [A] ack job 1003:4, : downstairs
16727 Sep 22 23:21:37.833 INFO current number of open files limit 65536 is already the maximum
16728 Sep 22 23:21:37.833 INFO Opened existing region file "/tmp/downstairs-agB7RjDz/region.json"
16729 Sep 22 23:21:37.833 INFO Database read version 1
16730 Sep 22 23:21:37.833 DEBG [rc] retire 1003 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003)], : downstairs
16731 Sep 22 23:21:37.833 INFO Database write version 1
16732 Sep 22 23:21:37.833 INFO Scrub check for 854c46f9-33d9-4450-b666-b1c944841aff
16733 Sep 22 23:21:37.833 DEBG up_ds_listen checked 1 jobs, back to waiting
16734 Sep 22 23:21:37.833 INFO Scrub for 854c46f9-33d9-4450-b666-b1c944841aff begins
16735 Sep 22 23:21:37.833 INFO Scrub with total_size:5120 block_size:512
16736 Sep 22 23:21:37.833 INFO Scrubs from block 0 to 10 in (256) 131072 size IOs pm:0
16737 Sep 22 23:21:37.833 INFO Adjust block_count to 10 at offset 0
16738 Sep 22 23:21:37.833 DEBG IO Read 1004 has deps []
16739 Sep 22 23:21:37.834 DEBG Read :1004 deps:[] res:true
16740 Sep 22 23:21:37.835 INFO UUID: dc8a7c43-ebc6-4a48-b659-c51804dc92bb
16741 Sep 22 23:21:37.835 INFO Blocks per extent:5 Total Extents: 2
16742 Sep 22 23:21:37.835 INFO Crucible Version: Crucible Version: 0.0.1
16743 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16744 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16745 rustc: 1.70.0 stable x86_64-unknown-illumos
16746 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16747 Sep 22 23:21:37.835 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16748 Sep 22 23:21:37.835 INFO Using address: 127.0.0.1:46928, task: main
16749 Sep 22 23:21:37.835 DEBG Read :1004 deps:[] res:true
16750 Sep 22 23:21:37.835 INFO Repair listens on 127.0.0.1:0, task: repair
16751 Sep 22 23:21:37.835 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54347, task: repair
16752 Sep 22 23:21:37.835 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54347, task: repair
16753 Sep 22 23:21:37.835 INFO listening, local_addr: 127.0.0.1:54347, task: repair
16754 Sep 22 23:21:37.835 INFO current number of open files limit 65536 is already the maximum
16755 Sep 22 23:21:37.835 INFO Opened existing region file "/tmp/downstairs-UTqRCVgq/region.json"
16756 Sep 22 23:21:37.835 INFO Database read version 1
16757 Sep 22 23:21:37.835 INFO Database write version 1
16758 Sep 22 23:21:37.836 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54347, task: repair
16759 Sep 22 23:21:37.836 DEBG Read :1004 deps:[] res:true
16760 Sep 22 23:21:37.836 INFO UUID: 1b0c673f-012e-4936-8109-087022151153
16761 Sep 22 23:21:37.836 INFO Blocks per extent:5 Total Extents: 2
16762 Sep 22 23:21:37.836 INFO Using repair address: 127.0.0.1:54347, task: main
16763 Sep 22 23:21:37.836 INFO No SSL acceptor configured, task: main
16764 Sep 22 23:21:37.836 INFO Crucible Version: Crucible Version: 0.0.1
16765 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16766 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16767 rustc: 1.70.0 stable x86_64-unknown-illumos
16768 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16769 Sep 22 23:21:37.836 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16770 Sep 22 23:21:37.836 INFO Using address: 127.0.0.1:38317, task: main
16771 Sep 22 23:21:37.836 INFO current number of open files limit 65536 is already the maximum
16772 Sep 22 23:21:37.836 INFO Created new region file "/tmp/downstairs-slwJmBq9/region.json"
16773 Sep 22 23:21:37.836 INFO Repair listens on 127.0.0.1:0, task: repair
16774 Sep 22 23:21:37.836 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60003, task: repair
16775 Sep 22 23:21:37.836 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60003, task: repair
16776 Sep 22 23:21:37.836 INFO listening, local_addr: 127.0.0.1:60003, task: repair
16777 Sep 22 23:21:37.836 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60003, task: repair
16778 Sep 22 23:21:37.836 INFO Using repair address: 127.0.0.1:60003, task: main
16779 Sep 22 23:21:37.836 DEBG up_ds_listen was notified
16780 Sep 22 23:21:37.836 INFO No SSL acceptor configured, task: main
16781 Sep 22 23:21:37.837 DEBG up_ds_listen process 1004
16782 Sep 22 23:21:37.837 DEBG [A] ack job 1004:5, : downstairs
16783 Sep 22 23:21:37.837 DEBG up_ds_listen checked 1 jobs, back to waiting
16784 Sep 22 23:21:37.837 DEBG Write :1001 deps:[JobId(1000)] res:true
16785 Sep 22 23:21:37.837 INFO UUID: dc9055ff-ffac-4e79-bd57-721a805ccf83
16786 Sep 22 23:21:37.837 INFO Blocks per extent:5 Total Extents: 2
16787 Sep 22 23:21:37.837 DEBG IO Read 1005 has deps [JobId(1004)]
16788 Sep 22 23:21:37.837 INFO Upstairs starts
16789 Sep 22 23:21:37.837 INFO Crucible Version: Crucible Version: 0.0.1
16790 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16791 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16792 rustc: 1.70.0 stable x86_64-unknown-illumos
16793 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16794 Sep 22 23:21:37.837 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16795 Sep 22 23:21:37.837 INFO Crucible Version: BuildInfo {
16796 version: "0.0.1",
16797 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16798 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16799 git_branch: "main",
16800 rustc_semver: "1.70.0",
16801 rustc_channel: "stable",
16802 rustc_host_triple: "x86_64-unknown-illumos",
16803 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16804 cargo_triple: "x86_64-unknown-illumos",
16805 debug: true,
16806 opt_level: 0,
16807 }
16808 Sep 22 23:21:37.837 INFO Using address: 127.0.0.1:34995, task: main
16809 Sep 22 23:21:37.837 INFO Upstairs <-> Downstairs Message Version: 4
16810 Sep 22 23:21:37.837 INFO Crucible stats registered with UUID: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5
16811 Sep 22 23:21:37.837 INFO Crucible 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 has session id: 44811dc3-2625-4058-b159-bbab926f9643
16812 Sep 22 23:21:37.837 INFO Repair listens on 127.0.0.1:0, task: repair
16813 Sep 22 23:21:37.837 INFO listening on 127.0.0.1:0, task: main
16814 Sep 22 23:21:37.837 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64816, task: repair
16815 Sep 22 23:21:37.837 INFO listening on 127.0.0.1:0, task: main
16816 Sep 22 23:21:37.837 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64816, task: repair
16817 Sep 22 23:21:37.837 INFO listening, local_addr: 127.0.0.1:64816, task: repair
16818 Sep 22 23:21:37.837 INFO listening on 127.0.0.1:0, task: main
16819 Sep 22 23:21:37.837 INFO [0] connecting to 127.0.0.1:65137, looper: 0
16820 Sep 22 23:21:37.837 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64816, task: repair
16821 Sep 22 23:21:37.838 INFO Using repair address: 127.0.0.1:64816, task: main
16822 Sep 22 23:21:37.838 INFO No SSL acceptor configured, task: main
16823 Sep 22 23:21:37.838 INFO [1] connecting to 127.0.0.1:44628, looper: 1
16824 Sep 22 23:21:37.838 DEBG Write :1001 deps:[JobId(1000)] res:true
16825 Sep 22 23:21:37.838 INFO [2] connecting to 127.0.0.1:38317, looper: 2
16826 Sep 22 23:21:37.838 DEBG Read :1005 deps:[JobId(1004)] res:true
16827 Sep 22 23:21:37.838 INFO current number of open files limit 65536 is already the maximum
16828 Sep 22 23:21:37.838 INFO up_listen starts, task: up_listen
16829 Sep 22 23:21:37.838 INFO Wait for all three downstairs to come online
16830 Sep 22 23:21:37.838 INFO Flush timeout: 0.5
16831 Sep 22 23:21:37.838 INFO Created new region file "/tmp/downstairs-NmE9CZeD/region.json"
16832 Sep 22 23:21:37.838 INFO accepted connection from 127.0.0.1:43896, task: main
16833 Sep 22 23:21:37.838 DEBG Read :1005 deps:[JobId(1004)] res:true
16834 Sep 22 23:21:37.838 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 1
16835 Sep 22 23:21:37.838 INFO [1] Proc runs for 127.0.0.1:44628 in state New
16836 Sep 22 23:21:37.838 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 2
16837 Sep 22 23:21:37.838 INFO [2] Proc runs for 127.0.0.1:38317 in state New
16838 Sep 22 23:21:37.838 INFO accepted connection from 127.0.0.1:47893, task: main
16839 Sep 22 23:21:37.839 INFO accepted connection from 127.0.0.1:48011, task: main
16840 Sep 22 23:21:37.839 DEBG Write :1001 deps:[JobId(1000)] res:true
16841 Sep 22 23:21:37.839 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 0
16842 Sep 22 23:21:37.839 DEBG Read :1005 deps:[JobId(1004)] res:true
16843 Sep 22 23:21:37.839 INFO [0] Proc runs for 127.0.0.1:65137 in state New
16844 Sep 22 23:21:37.839 INFO current number of open files limit 65536 is already the maximum
16845 Sep 22 23:21:37.839 INFO Opened existing region file "/tmp/downstairs-slwJmBq9/region.json"
16846 Sep 22 23:21:37.839 INFO Database read version 1
16847 Sep 22 23:21:37.839 INFO Database write version 1
16848 Sep 22 23:21:37.839 DEBG [0] Read AckReady 1004, : downstairs
16849 Sep 22 23:21:37.839 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
16850 Sep 22 23:21:37.839 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } connected, version 4, task: proc
16851 Sep 22 23:21:37.839 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
16852 Sep 22 23:21:37.839 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } connected, version 4, task: proc
16853 Sep 22 23:21:37.839 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
16854 Sep 22 23:21:37.839 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } connected, version 4, task: proc
16855 The guest has requested activation
16856 Sep 22 23:21:37.839 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) New New New ds_transition to WaitActive
16857 Sep 22 23:21:37.839 INFO [1] Transition from New to WaitActive
16858 Sep 22 23:21:37.840 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) New WaitActive New ds_transition to WaitActive
16859 Sep 22 23:21:37.840 INFO [2] Transition from New to WaitActive
16860 Sep 22 23:21:37.840 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) New WaitActive WaitActive ds_transition to WaitActive
16861 Sep 22 23:21:37.840 INFO [0] Transition from New to WaitActive
16862 Sep 22 23:21:37.840 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 active request set
16863 Sep 22 23:21:37.840 INFO [0] received activate with gen 1
16864 Sep 22 23:21:37.840 INFO [0] client got ds_active_rx, promote! session c9717b37-7d6a-40f7-9030-e7e57914f334
16865 Sep 22 23:21:37.840 INFO [1] received activate with gen 1
16866 Sep 22 23:21:37.840 INFO [1] client got ds_active_rx, promote! session c9717b37-7d6a-40f7-9030-e7e57914f334
16867 Sep 22 23:21:37.840 INFO [2] received activate with gen 1
16868 Sep 22 23:21:37.840 INFO [2] client got ds_active_rx, promote! session c9717b37-7d6a-40f7-9030-e7e57914f334
16869 Sep 22 23:21:37.840 INFO UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } is now active (read-write)
16870 Sep 22 23:21:37.840 INFO UUID: eb9758aa-5126-4e8c-b409-22a2bcacbe1b
16871 Sep 22 23:21:37.840 INFO Blocks per extent:5 Total Extents: 2
16872 Sep 22 23:21:37.840 INFO UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } is now active (read-write)
16873 Sep 22 23:21:37.840 INFO Crucible Version: Crucible Version: 0.0.1
16874 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16875 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16876 rustc: 1.70.0 stable x86_64-unknown-illumos
16877 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16878 Sep 22 23:21:37.840 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16879 Sep 22 23:21:37.840 INFO Using address: 127.0.0.1:43209, task: main
16880 Sep 22 23:21:37.840 INFO UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } is now active (read-write)
16881 Sep 22 23:21:37.840 DEBG [2] Read already AckReady 1004, : downstairs
16882 Sep 22 23:21:37.841 INFO Repair listens on 127.0.0.1:0, task: repair
16883 Sep 22 23:21:37.841 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51701, task: repair
16884 Sep 22 23:21:37.841 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51701, task: repair
16885 Sep 22 23:21:37.841 INFO listening, local_addr: 127.0.0.1:51701, task: repair
16886 Sep 22 23:21:37.841 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51701, task: repair
16887 Sep 22 23:21:37.841 INFO Using repair address: 127.0.0.1:51701, task: main
16888 Sep 22 23:21:37.841 INFO No SSL acceptor configured, task: main
16889 Sep 22 23:21:37.841 INFO [1] downstairs client at 127.0.0.1:44628 has UUID a1447c8e-cf80-4bea-b730-010907ef0c7f
16890 Sep 22 23:21:37.841 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a1447c8e-cf80-4bea-b730-010907ef0c7f, encrypted: true, database_read_version: 1, database_write_version: 1 }
16891 Sep 22 23:21:37.841 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
16892 Sep 22 23:21:37.841 INFO [2] downstairs client at 127.0.0.1:38317 has UUID 1b0c673f-012e-4936-8109-087022151153
16893 Sep 22 23:21:37.841 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1b0c673f-012e-4936-8109-087022151153, encrypted: true, database_read_version: 1, database_write_version: 1 }
16894 Sep 22 23:21:37.841 INFO current number of open files limit 65536 is already the maximum
16895 Sep 22 23:21:37.841 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
16896 Sep 22 23:21:37.841 INFO current number of open files limit 65536 is already the maximum
16897 Sep 22 23:21:37.841 INFO Opened existing region file "/tmp/downstairs-NmE9CZeD/region.json"
16898 Sep 22 23:21:37.841 INFO Created new region file "/tmp/downstairs-usAyrxwT/region.json"
16899 Sep 22 23:21:37.841 INFO Database read version 1
16900 Sep 22 23:21:37.841 INFO Database write version 1
16901 Sep 22 23:21:37.841 INFO [0] downstairs client at 127.0.0.1:65137 has UUID f5ea863d-974b-4712-8f07-7666c185979b
16902 Sep 22 23:21:37.841 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f5ea863d-974b-4712-8f07-7666c185979b, encrypted: true, database_read_version: 1, database_write_version: 1 }
16903 Sep 22 23:21:37.841 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
16904 Sep 22 23:21:37.842 INFO Current flush_numbers [0..12]: [0, 0]
16905 Sep 22 23:21:37.842 INFO Downstairs has completed Negotiation, task: proc
16906 Sep 22 23:21:37.842 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
16907 Sep 22 23:21:37.842 INFO Current flush_numbers [0..12]: [0, 0]
16908 Sep 22 23:21:37.842 INFO Downstairs has completed Negotiation, task: proc
16909 Sep 22 23:21:37.842 DEBG [1] Read already AckReady 1004, : downstairs
16910 Sep 22 23:21:37.842 DEBG up_ds_listen was notified
16911 Sep 22 23:21:37.842 DEBG up_ds_listen process 1004
16912 Sep 22 23:21:37.842 DEBG [A] ack job 1004:5, : downstairs
16913 Sep 22 23:21:37.842 INFO Current flush_numbers [0..12]: [0, 0]
16914 Sep 22 23:21:37.843 INFO Downstairs has completed Negotiation, task: proc
16915 Sep 22 23:21:37.843 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16916 Sep 22 23:21:37.843 INFO [1] Transition from WaitActive to WaitQuorum
16917 Sep 22 23:21:37.843 WARN [1] new RM replaced this: None
16918 Sep 22 23:21:37.843 DEBG up_ds_listen checked 1 jobs, back to waiting
16919 Sep 22 23:21:37.843 INFO [1] Starts reconcile loop
16920 Sep 22 23:21:37.843 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
16921 Sep 22 23:21:37.843 INFO [2] Transition from WaitActive to WaitQuorum
16922 Sep 22 23:21:37.843 WARN [2] new RM replaced this: None
16923 Sep 22 23:21:37.843 INFO [2] Starts reconcile loop
16924 Sep 22 23:21:37.843 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) WaitActive WaitQuorum WaitQuorum ds_transition to WaitQuorum
16925 Sep 22 23:21:37.843 INFO [0] Transition from WaitActive to WaitQuorum
16926 Sep 22 23:21:37.843 WARN [0] new RM replaced this: None
16927 Sep 22 23:21:37.843 DEBG [0] Read AckReady 1005, : downstairs
16928 Sep 22 23:21:37.843 INFO [0] Starts reconcile loop
16929 Sep 22 23:21:37.843 INFO [1] 127.0.0.1:44628 task reports connection:true
16930 Sep 22 23:21:37.843 INFO UUID: 2e1bc31f-575d-410d-bb06-222733a39c1e
16931 Sep 22 23:21:37.843 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitQuorum WaitQuorum WaitQuorum
16932 Sep 22 23:21:37.843 INFO Blocks per extent:5 Total Extents: 2
16933 Sep 22 23:21:37.843 INFO [0]R flush_numbers: [0, 0]
16934 Sep 22 23:21:37.844 INFO [0]R generation: [0, 0]
16935 Sep 22 23:21:37.844 INFO [0]R dirty: [false, false]
16936 Sep 22 23:21:37.844 INFO [1]R flush_numbers: [0, 0]
16937 Sep 22 23:21:37.844 INFO Crucible Version: Crucible Version: 0.0.1
16938 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16939 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16940 rustc: 1.70.0 stable x86_64-unknown-illumos
16941 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16942 Sep 22 23:21:37.844 INFO [1]R generation: [0, 0]
16943 Sep 22 23:21:37.844 INFO [1]R dirty: [false, false]
16944 Sep 22 23:21:37.844 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16945 Sep 22 23:21:37.844 INFO [2]R flush_numbers: [0, 0]
16946 Sep 22 23:21:37.844 INFO Using address: 127.0.0.1:45660, task: main
16947 Sep 22 23:21:37.844 INFO [2]R generation: [0, 0]
16948 Sep 22 23:21:37.844 INFO [2]R dirty: [false, false]
16949 Sep 22 23:21:37.844 INFO Max found gen is 1
16950 Sep 22 23:21:37.844 INFO Generation requested: 1 >= found:1
16951 Sep 22 23:21:37.844 INFO Next flush: 1
16952 Sep 22 23:21:37.844 INFO All extents match
16953 Sep 22 23:21:37.844 INFO No downstairs repair required
16954 Sep 22 23:21:37.844 INFO No initial repair work was required
16955 Sep 22 23:21:37.844 INFO Set Downstairs and Upstairs active
16956 Sep 22 23:21:37.844 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 is now active with session: c9717b37-7d6a-40f7-9030-e7e57914f334
16957 Sep 22 23:21:37.844 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Set Active after no repair
16958 Sep 22 23:21:37.844 INFO Notify all downstairs, region set compare is done.
16959 Sep 22 23:21:37.844 INFO Set check for repair
16960 Sep 22 23:21:37.844 INFO [2] 127.0.0.1:38317 task reports connection:true
16961 Sep 22 23:21:37.844 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Active Active Active
16962 Sep 22 23:21:37.844 INFO Repair listens on 127.0.0.1:0, task: repair
16963 Sep 22 23:21:37.844 INFO Set check for repair
16964 Sep 22 23:21:37.844 INFO [0] 127.0.0.1:65137 task reports connection:true
16965 Sep 22 23:21:37.844 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Active Active Active
16966 Sep 22 23:21:37.844 INFO Set check for repair
16967 Sep 22 23:21:37.844 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33168, task: repair
16968 Sep 22 23:21:37.844 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33168, task: repair
16969 Sep 22 23:21:37.844 INFO current number of open files limit 65536 is already the maximum
16970 Sep 22 23:21:37.844 INFO Opened existing region file "/tmp/downstairs-usAyrxwT/region.json"
16971 Sep 22 23:21:37.844 INFO Database read version 1
16972 Sep 22 23:21:37.844 INFO [0] received reconcile message
16973 Sep 22 23:21:37.844 INFO Database write version 1
16974 Sep 22 23:21:37.844 INFO listening, local_addr: 127.0.0.1:33168, task: repair
16975 Sep 22 23:21:37.844 INFO [0] All repairs completed, exit
16976 Sep 22 23:21:37.844 INFO [0] Starts cmd_loop
16977 Sep 22 23:21:37.844 INFO [1] received reconcile message
16978 Sep 22 23:21:37.844 INFO [1] All repairs completed, exit
16979 Sep 22 23:21:37.844 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33168, task: repair
16980 Sep 22 23:21:37.844 INFO [1] Starts cmd_loop
16981 Sep 22 23:21:37.844 INFO [2] received reconcile message
16982 Sep 22 23:21:37.844 INFO [2] All repairs completed, exit
16983 Sep 22 23:21:37.844 INFO Using repair address: 127.0.0.1:33168, task: main
16984 Sep 22 23:21:37.844 INFO [2] Starts cmd_loop
16985 Sep 22 23:21:37.844 INFO No SSL acceptor configured, task: main
16986 The guest has finished waiting for activation
16987 Sep 22 23:21:37.845 INFO current number of open files limit 65536 is already the maximum
16988 Sep 22 23:21:37.845 INFO Created new region file "/tmp/downstairs-w5PkmiSc/region.json"
16989 Sep 22 23:21:37.846 DEBG [1] Read already AckReady 1005, : downstairs
16990 Sep 22 23:21:37.846 DEBG up_ds_listen was notified
16991 Sep 22 23:21:37.846 DEBG up_ds_listen process 1002
16992 Sep 22 23:21:37.846 DEBG [A] ack job 1002:3, : downstairs
16993 Sep 22 23:21:37.846 DEBG up_ds_listen checked 1 jobs, back to waiting
16994 Sep 22 23:21:37.846 INFO Scrub at offset 10/10 sp:10
16995 Sep 22 23:21:37.847 INFO UUID: 8472a597-92db-4cea-a2e5-658ef1a3d615
16996 Sep 22 23:21:37.847 INFO Blocks per extent:5 Total Extents: 2
16997 Sep 22 23:21:37.847 INFO Crucible Version: Crucible Version: 0.0.1
16998 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16999 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17000 rustc: 1.70.0 stable x86_64-unknown-illumos
17001 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17002 Sep 22 23:21:37.847 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17003 Sep 22 23:21:37.847 INFO Using address: 127.0.0.1:44686, task: main
17004 Sep 22 23:21:37.848 INFO Scrub 854c46f9-33d9-4450-b666-b1c944841aff done in 0 seconds. Retries:0 scrub_size:5120 size:10 pause_milli:0
17005 Sep 22 23:21:37.848 INFO Repair listens on 127.0.0.1:0, task: repair
17006 Sep 22 23:21:37.848 DEBG IO Flush 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
17007 Sep 22 23:21:37.848 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51964, task: repair
17008 Sep 22 23:21:37.848 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51964, task: repair
17009 Sep 22 23:21:37.848 DEBG IO Write 1000 has deps []
17010 Sep 22 23:21:37.848 INFO listening, local_addr: 127.0.0.1:51964, task: repair
17011 test test::integration_test_scrub_short_sparse ... Sep 22 23:21:37.848 DEBG up_ds_listen was notified
17012 ok
17013 Sep 22 23:21:37.848 DEBG up_ds_listen process 1000
17014 Sep 22 23:21:37.848 DEBG [2] Read already AckReady 1005, : downstairs
17015 Sep 22 23:21:37.848 DEBG [A] ack job 1000:1, : downstairs
17016 Sep 22 23:21:37.848 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51964, task: repair
17017 Sep 22 23:21:37.848 DEBG up_ds_listen checked 1 jobs, back to waiting
17018 Sep 22 23:21:37.848 DEBG up_ds_listen was notified
17019 Sep 22 23:21:37.848 INFO Using repair address: 127.0.0.1:51964, task: main
17020 Sep 22 23:21:37.848 INFO No SSL acceptor configured, task: main
17021 Sep 22 23:21:37.848 DEBG up_ds_listen process 1005
17022 Sep 22 23:21:37.848 DEBG [A] ack job 1005:6, : downstairs
17023 Sep 22 23:21:37.848 INFO current number of open files limit 65536 is already the maximum
17024 Sep 22 23:21:37.848 INFO Created new region file "/tmp/downstairs-6NhFd3Sv/region.json"
17025 Sep 22 23:21:37.849 DEBG up_ds_listen checked 1 jobs, back to waiting
17026 Sep 22 23:21:37.849 INFO current number of open files limit 65536 is already the maximum
17027 Sep 22 23:21:37.849 INFO Opened existing region file "/tmp/downstairs-w5PkmiSc/region.json"
17028 Sep 22 23:21:37.849 INFO Database read version 1
17029 Sep 22 23:21:37.849 INFO Database write version 1
17030 Sep 22 23:21:37.850 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
17031 Sep 22 23:21:37.850 INFO Upstairs starts
17032 Sep 22 23:21:37.850 INFO Crucible Version: BuildInfo {
17033 version: "0.0.1",
17034 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17035 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17036 git_branch: "main",
17037 rustc_semver: "1.70.0",
17038 rustc_channel: "stable",
17039 rustc_host_triple: "x86_64-unknown-illumos",
17040 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17041 cargo_triple: "x86_64-unknown-illumos",
17042 debug: true,
17043 opt_level: 0,
17044 }
17045 Sep 22 23:21:37.850 INFO Upstairs <-> Downstairs Message Version: 4
17046 Sep 22 23:21:37.850 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
17047 Sep 22 23:21:37.850 INFO Crucible stats registered with UUID: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5
17048 Sep 22 23:21:37.850 INFO Crucible ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 has session id: f0462090-a03a-49ad-a727-20f1717992bc
17049 Sep 22 23:21:37.850 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
17050 Sep 22 23:21:37.850 INFO listening on 127.0.0.1:0, task: main
17051 Sep 22 23:21:37.850 INFO listening on 127.0.0.1:0, task: main
17052 Sep 22 23:21:37.850 INFO listening on 127.0.0.1:0, task: main
17053 Sep 22 23:21:37.850 INFO [0] connecting to 127.0.0.1:46928, looper: 0
17054 Sep 22 23:21:37.850 DEBG up_ds_listen was notified
17055 Sep 22 23:21:37.850 INFO [1] connecting to 127.0.0.1:43209, looper: 1
17056 Sep 22 23:21:37.850 DEBG up_ds_listen process 1003
17057 Sep 22 23:21:37.850 DEBG [A] ack job 1003:4, : downstairs
17058 Sep 22 23:21:37.850 INFO UUID: 32384725-b4ea-4f5d-a53b-d921af2e648c
17059 Sep 22 23:21:37.850 INFO Blocks per extent:5 Total Extents: 2
17060 Sep 22 23:21:37.850 INFO [2] connecting to 127.0.0.1:44686, looper: 2
17061 Sep 22 23:21:37.850 INFO Crucible Version: Crucible Version: 0.0.1
17062 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17063 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17064 rustc: 1.70.0 stable x86_64-unknown-illumos
17065 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17066 Sep 22 23:21:37.850 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17067 Sep 22 23:21:37.850 INFO Request to deactivate this guest
17068 Sep 22 23:21:37.850 INFO Using address: 127.0.0.1:45589, task: main
17069 Sep 22 23:21:37.850 INFO up_listen starts, task: up_listen
17070 Sep 22 23:21:37.850 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 set deactivating.
17071 Sep 22 23:21:37.850 DEBG [rc] retire 1003 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003)], : downstairs
17072 Sep 22 23:21:37.850 DEBG IO Flush 1001 has deps [JobId(1000)]
17073 Sep 22 23:21:37.850 INFO Wait for all three downstairs to come online
17074 Sep 22 23:21:37.850 INFO Flush timeout: 0.5
17075 Sep 22 23:21:37.850 DEBG up_ds_listen checked 1 jobs, back to waiting
17076 Sep 22 23:21:37.851 INFO Repair listens on 127.0.0.1:0, task: repair
17077 Sep 22 23:21:37.851 INFO [1] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 looper connected, looper: 1
17078 Sep 22 23:21:37.851 INFO [1] Proc runs for 127.0.0.1:43209 in state New
17079 Sep 22 23:21:37.851 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33220, task: repair
17080 Sep 22 23:21:37.851 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33220, task: repair
17081 Sep 22 23:21:37.851 DEBG IO Read 1004 has deps []
17082 Sep 22 23:21:37.851 INFO listening, local_addr: 127.0.0.1:33220, task: repair
17083 Sep 22 23:21:37.851 INFO accepted connection from 127.0.0.1:42260, task: main
17084 Sep 22 23:21:37.851 INFO [2] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 looper connected, looper: 2
17085 Sep 22 23:21:37.851 INFO [2] Proc runs for 127.0.0.1:44686 in state New
17086 Sep 22 23:21:37.851 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33220, task: repair
17087 Sep 22 23:21:37.851 INFO Using repair address: 127.0.0.1:33220, task: main
17088 Sep 22 23:21:37.851 INFO No SSL acceptor configured, task: main
17089 Sep 22 23:21:37.851 INFO accepted connection from 127.0.0.1:37386, task: main
17090 Sep 22 23:21:37.851 INFO accepted connection from 127.0.0.1:33056, task: main
17091 Sep 22 23:21:37.851 INFO [0] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 looper connected, looper: 0
17092 Sep 22 23:21:37.851 INFO [0] Proc runs for 127.0.0.1:46928 in state New
17093 Sep 22 23:21:37.852 DEBG Read :1004 deps:[] res:true
17094 Sep 22 23:21:37.852 INFO Connection request from ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 with version 4, task: proc
17095 Sep 22 23:21:37.852 INFO upstairs UpstairsConnection { upstairs_id: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5, session_id: 9d285066-2c55-4273-bd21-6cddb01ea677, gen: 1 } connected, version 4, task: proc
17096 Sep 22 23:21:37.852 INFO Connection request from ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 with version 4, task: proc
17097 Sep 22 23:21:37.852 INFO upstairs UpstairsConnection { upstairs_id: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5, session_id: 9d285066-2c55-4273-bd21-6cddb01ea677, gen: 1 } connected, version 4, task: proc
17098 Sep 22 23:21:37.852 INFO Connection request from ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 with version 4, task: proc
17099 Sep 22 23:21:37.852 INFO upstairs UpstairsConnection { upstairs_id: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5, session_id: 9d285066-2c55-4273-bd21-6cddb01ea677, gen: 1 } connected, version 4, task: proc
17100 Sep 22 23:21:37.852 INFO [1] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 (9d285066-2c55-4273-bd21-6cddb01ea677) New New New ds_transition to WaitActive
17101 Sep 22 23:21:37.852 INFO [1] Transition from New to WaitActive
17102 Sep 22 23:21:37.852 DEBG Read :1004 deps:[] res:true
17103 Sep 22 23:21:37.853 INFO [2] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 (9d285066-2c55-4273-bd21-6cddb01ea677) New WaitActive New ds_transition to WaitActive
17104 Sep 22 23:21:37.853 INFO [2] Transition from New to WaitActive
17105 Sep 22 23:21:37.853 DEBG Write :1000 deps:[] res:true
17106 Sep 22 23:21:37.853 INFO [0] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 (9d285066-2c55-4273-bd21-6cddb01ea677) New WaitActive WaitActive ds_transition to WaitActive
17107 Sep 22 23:21:37.853 INFO [0] Transition from New to WaitActive
17108 Sep 22 23:21:37.853 INFO Upstairs starts
17109 The guest has requested activation
17110 Sep 22 23:21:37.853 INFO Crucible Version: BuildInfo {
17111 version: "0.0.1",
17112 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17113 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17114 git_branch: "main",
17115 rustc_semver: "1.70.0",
17116 rustc_channel: "stable",
17117 rustc_host_triple: "x86_64-unknown-illumos",
17118 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17119 cargo_triple: "x86_64-unknown-illumos",
17120 debug: true,
17121 opt_level: 0,
17122 }
17123 Sep 22 23:21:37.853 INFO Upstairs <-> Downstairs Message Version: 4
17124 Sep 22 23:21:37.853 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 active request set
17125 Sep 22 23:21:37.853 INFO Crucible stats registered with UUID: 91b6bfa9-6f83-4cc0-a735-385db0454268
17126 Sep 22 23:21:37.853 DEBG Read :1004 deps:[] res:true
17127 Sep 22 23:21:37.853 INFO Crucible 91b6bfa9-6f83-4cc0-a735-385db0454268 has session id: 240e8c63-755c-4bf6-a701-d7f79a8c0d23
17128 Sep 22 23:21:37.853 INFO [0] received activate with gen 1
17129 Sep 22 23:21:37.853 INFO [0] client got ds_active_rx, promote! session 9d285066-2c55-4273-bd21-6cddb01ea677
17130 Sep 22 23:21:37.853 INFO [1] received activate with gen 1
17131 Sep 22 23:21:37.853 INFO [1] client got ds_active_rx, promote! session 9d285066-2c55-4273-bd21-6cddb01ea677
17132 Sep 22 23:21:37.853 INFO [2] received activate with gen 1
17133 Sep 22 23:21:37.853 INFO [2] client got ds_active_rx, promote! session 9d285066-2c55-4273-bd21-6cddb01ea677
17134 Sep 22 23:21:37.853 INFO listening on 127.0.0.1:0, task: main
17135 Sep 22 23:21:37.853 INFO listening on 127.0.0.1:0, task: main
17136 Sep 22 23:21:37.853 INFO listening on 127.0.0.1:0, task: main
17137 Sep 22 23:21:37.853 INFO UpstairsConnection { upstairs_id: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5, session_id: 9d285066-2c55-4273-bd21-6cddb01ea677, gen: 1 } is now active (read-write)
17138 Sep 22 23:21:37.853 INFO current number of open files limit 65536 is already the maximum
17139 Sep 22 23:21:37.853 INFO Opened existing region file "/tmp/downstairs-6NhFd3Sv/region.json"
17140 Sep 22 23:21:37.853 INFO Database read version 1
17141 Sep 22 23:21:37.853 INFO [0] connecting to 127.0.0.1:34995, looper: 0
17142 Sep 22 23:21:37.853 INFO Database write version 1
17143 Sep 22 23:21:37.853 INFO UpstairsConnection { upstairs_id: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5, session_id: 9d285066-2c55-4273-bd21-6cddb01ea677, gen: 1 } is now active (read-write)
17144 Sep 22 23:21:37.854 INFO [1] connecting to 127.0.0.1:45660, looper: 1
17145 Sep 22 23:21:37.854 INFO UpstairsConnection { upstairs_id: ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5, session_id: 9d285066-2c55-4273-bd21-6cddb01ea677, gen: 1 } is now active (read-write)
17146 Sep 22 23:21:37.854 INFO [2] connecting to 127.0.0.1:45589, looper: 2
17147 Sep 22 23:21:37.854 DEBG Write :1000 deps:[] res:true
17148 test test::integration_test_scrub_short ... ok
17149 Sep 22 23:21:37.854 INFO up_listen starts, task: up_listen
17150 Sep 22 23:21:37.854 INFO Wait for all three downstairs to come online
17151 Sep 22 23:21:37.854 INFO Flush timeout: 0.5
17152 Sep 22 23:21:37.854 INFO [1] downstairs client at 127.0.0.1:43209 has UUID eb9758aa-5126-4e8c-b409-22a2bcacbe1b
17153 Sep 22 23:21:37.854 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: eb9758aa-5126-4e8c-b409-22a2bcacbe1b, encrypted: true, database_read_version: 1, database_write_version: 1 }
17154 Sep 22 23:21:37.854 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 WaitActive WaitActive WaitActive
17155 Sep 22 23:21:37.854 INFO [2] downstairs client at 127.0.0.1:44686 has UUID 8472a597-92db-4cea-a2e5-658ef1a3d615
17156 Sep 22 23:21:37.854 INFO [2] 91b6bfa9-6f83-4cc0-a735-385db0454268 looper connected, looper: 2
17157 Sep 22 23:21:37.854 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8472a597-92db-4cea-a2e5-658ef1a3d615, encrypted: true, database_read_version: 1, database_write_version: 1 }
17158 Sep 22 23:21:37.854 INFO [2] Proc runs for 127.0.0.1:45589 in state New
17159 Sep 22 23:21:37.854 INFO current number of open files limit 65536 is already the maximum
17160 Sep 22 23:21:37.854 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 WaitActive WaitActive WaitActive
17161 Sep 22 23:21:37.854 INFO [0] 91b6bfa9-6f83-4cc0-a735-385db0454268 looper connected, looper: 0
17162 Sep 22 23:21:37.854 INFO [0] downstairs client at 127.0.0.1:46928 has UUID dc8a7c43-ebc6-4a48-b659-c51804dc92bb
17163 Sep 22 23:21:37.854 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: dc8a7c43-ebc6-4a48-b659-c51804dc92bb, encrypted: true, database_read_version: 1, database_write_version: 1 }
17164 Sep 22 23:21:37.854 INFO Created new region file "/tmp/downstairs-eu4aQhL5/region.json"
17165 Sep 22 23:21:37.854 INFO [0] Proc runs for 127.0.0.1:34995 in state New
17166 Sep 22 23:21:37.854 INFO UUID: 70aec45a-414c-484a-9aa8-e75f71202205
17167 Sep 22 23:21:37.854 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 WaitActive WaitActive WaitActive
17168 Sep 22 23:21:37.854 INFO Blocks per extent:5 Total Extents: 2
17169 Sep 22 23:21:37.854 INFO [1] 91b6bfa9-6f83-4cc0-a735-385db0454268 looper connected, looper: 1
17170 Sep 22 23:21:37.855 INFO [1] Proc runs for 127.0.0.1:45660 in state New
17171 Sep 22 23:21:37.855 INFO Crucible Version: Crucible Version: 0.0.1
17172 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17173 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17174 rustc: 1.70.0 stable x86_64-unknown-illumos
17175 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17176 Sep 22 23:21:37.855 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17177 Sep 22 23:21:37.855 INFO Using address: 127.0.0.1:65399, task: main
17178 Sep 22 23:21:37.855 INFO Current flush_numbers [0..12]: [0, 0]
17179 Sep 22 23:21:37.855 INFO accepted connection from 127.0.0.1:51086, task: main
17180 Sep 22 23:21:37.855 DEBG Write :1000 deps:[] res:true
17181 Sep 22 23:21:37.855 INFO accepted connection from 127.0.0.1:33129, task: main
17182 Sep 22 23:21:37.855 INFO Downstairs has completed Negotiation, task: proc
17183 Sep 22 23:21:37.855 INFO accepted connection from 127.0.0.1:34652, task: main
17184 Sep 22 23:21:37.855 INFO Current flush_numbers [0..12]: [0, 0]
17185 Sep 22 23:21:37.855 INFO Repair listens on 127.0.0.1:0, task: repair
17186 Sep 22 23:21:37.855 INFO Downstairs has completed Negotiation, task: proc
17187 Sep 22 23:21:37.855 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55978, task: repair
17188 Sep 22 23:21:37.855 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55978, task: repair
17189 Sep 22 23:21:37.855 INFO Connection request from 91b6bfa9-6f83-4cc0-a735-385db0454268 with version 4, task: proc
17190 Sep 22 23:21:37.855 INFO upstairs UpstairsConnection { upstairs_id: 91b6bfa9-6f83-4cc0-a735-385db0454268, session_id: b6befe75-491e-4821-85c4-60905223c3c2, gen: 1 } connected, version 4, task: proc
17191 Sep 22 23:21:37.855 INFO listening, local_addr: 127.0.0.1:55978, task: repair
17192 Sep 22 23:21:37.855 INFO Current flush_numbers [0..12]: [0, 0]
17193 Sep 22 23:21:37.855 INFO [1] deactivate job 1001 not InProgress flush, NO
17194 Sep 22 23:21:37.855 INFO Connection request from 91b6bfa9-6f83-4cc0-a735-385db0454268 with version 4, task: proc
17195 Sep 22 23:21:37.855 INFO [2] deactivate job 1001 not InProgress flush, NO
17196 Sep 22 23:21:37.855 INFO upstairs UpstairsConnection { upstairs_id: 91b6bfa9-6f83-4cc0-a735-385db0454268, session_id: b6befe75-491e-4821-85c4-60905223c3c2, gen: 1 } connected, version 4, task: proc
17197 Sep 22 23:21:37.855 INFO [0] deactivate job 1001 not InProgress flush, NO
17198 Sep 22 23:21:37.855 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55978, task: repair
17199 Sep 22 23:21:37.855 INFO Downstairs has completed Negotiation, task: proc
17200 Sep 22 23:21:37.855 INFO Using repair address: 127.0.0.1:55978, task: main
17201 Sep 22 23:21:37.855 INFO Connection request from 91b6bfa9-6f83-4cc0-a735-385db0454268 with version 4, task: proc
17202 Sep 22 23:21:37.855 INFO No SSL acceptor configured, task: main
17203 Sep 22 23:21:37.855 INFO upstairs UpstairsConnection { upstairs_id: 91b6bfa9-6f83-4cc0-a735-385db0454268, session_id: b6befe75-491e-4821-85c4-60905223c3c2, gen: 1 } connected, version 4, task: proc
17204 Sep 22 23:21:37.855 INFO [1] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 (9d285066-2c55-4273-bd21-6cddb01ea677) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17205 Sep 22 23:21:37.856 INFO [1] Transition from WaitActive to WaitQuorum
17206 Sep 22 23:21:37.856 WARN [1] new RM replaced this: None
17207 Sep 22 23:21:37.856 INFO [1] Starts reconcile loop
17208 The guest has requested activation
17209 Sep 22 23:21:37.856 INFO current number of open files limit 65536 is already the maximum
17210 Sep 22 23:21:37.856 INFO [2] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 (9d285066-2c55-4273-bd21-6cddb01ea677) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
17211 Sep 22 23:21:37.856 INFO [2] Transition from WaitActive to WaitQuorum
17212 Sep 22 23:21:37.856 WARN [2] new RM replaced this: None
17213 Sep 22 23:21:37.856 INFO [2] Starts reconcile loop
17214 Sep 22 23:21:37.856 INFO [2] 91b6bfa9-6f83-4cc0-a735-385db0454268 (b6befe75-491e-4821-85c4-60905223c3c2) New New New ds_transition to WaitActive
17215 Sep 22 23:21:37.856 INFO [2] Transition from New to WaitActive
17216 Sep 22 23:21:37.856 INFO Created new region file "/tmp/downstairs-CuiYNtKt/region.json"
17217 Sep 22 23:21:37.856 INFO [0] ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 (9d285066-2c55-4273-bd21-6cddb01ea677) WaitActive WaitQuorum WaitQuorum ds_transition to WaitQuorum
17218 Sep 22 23:21:37.856 INFO [0] Transition from WaitActive to WaitQuorum
17219 Sep 22 23:21:37.856 WARN [0] new RM replaced this: None
17220 Sep 22 23:21:37.856 INFO [0] Starts reconcile loop
17221 Sep 22 23:21:37.856 INFO [0] 91b6bfa9-6f83-4cc0-a735-385db0454268 (b6befe75-491e-4821-85c4-60905223c3c2) New New WaitActive ds_transition to WaitActive
17222 Sep 22 23:21:37.856 INFO [1] 127.0.0.1:43209 task reports connection:true
17223 Sep 22 23:21:37.856 INFO [0] Transition from New to WaitActive
17224 Sep 22 23:21:37.856 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 WaitQuorum WaitQuorum WaitQuorum
17225 Sep 22 23:21:37.856 INFO [0]R flush_numbers: [0, 0]
17226 Sep 22 23:21:37.856 INFO [0]R generation: [0, 0]
17227 Sep 22 23:21:37.856 INFO [0]R dirty: [false, false]
17228 Sep 22 23:21:37.856 INFO [1]R flush_numbers: [0, 0]
17229 Sep 22 23:21:37.856 INFO [1]R generation: [0, 0]
17230 Sep 22 23:21:37.856 INFO [1]R dirty: [false, false]
17231 Sep 22 23:21:37.856 INFO [1] 91b6bfa9-6f83-4cc0-a735-385db0454268 (b6befe75-491e-4821-85c4-60905223c3c2) WaitActive New WaitActive ds_transition to WaitActive
17232 Sep 22 23:21:37.856 INFO [2]R flush_numbers: [0, 0]
17233 Sep 22 23:21:37.856 INFO [2]R generation: [0, 0]
17234 Sep 22 23:21:37.856 INFO [1] Transition from New to WaitActive
17235 Sep 22 23:21:37.856 INFO [2]R dirty: [false, false]
17236 Sep 22 23:21:37.856 INFO Max found gen is 1
17237 Sep 22 23:21:37.856 INFO Generation requested: 1 >= found:1
17238 Sep 22 23:21:37.856 INFO Next flush: 1
17239 Sep 22 23:21:37.856 INFO All extents match
17240 Sep 22 23:21:37.856 INFO No downstairs repair required
17241 Sep 22 23:21:37.856 INFO No initial repair work was required
17242 Sep 22 23:21:37.856 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 active request set
17243 Sep 22 23:21:37.856 INFO Set Downstairs and Upstairs active
17244 Sep 22 23:21:37.856 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 is now active with session: 9d285066-2c55-4273-bd21-6cddb01ea677
17245 Sep 22 23:21:37.856 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 Set Active after no repair
17246 Sep 22 23:21:37.856 INFO Notify all downstairs, region set compare is done.
17247 Sep 22 23:21:37.856 INFO [0] received activate with gen 1
17248 Sep 22 23:21:37.856 INFO Set check for repair
17249 Sep 22 23:21:37.856 INFO [0] client got ds_active_rx, promote! session b6befe75-491e-4821-85c4-60905223c3c2
17250 Sep 22 23:21:37.856 INFO [2] 127.0.0.1:44686 task reports connection:true
17251 Sep 22 23:21:37.856 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 Active Active Active
17252 Sep 22 23:21:37.856 INFO Set check for repair
17253 Sep 22 23:21:37.856 INFO [0] 127.0.0.1:46928 task reports connection:true
17254 Sep 22 23:21:37.856 INFO ea4527bd-0243-45b8-8cd6-7dddcfc8c8f5 Active Active Active
17255 Sep 22 23:21:37.856 INFO [1] received activate with gen 1
17256 Sep 22 23:21:37.856 INFO Set check for repair
17257 Sep 22 23:21:37.856 INFO [1] client got ds_active_rx, promote! session b6befe75-491e-4821-85c4-60905223c3c2
17258 Sep 22 23:21:37.856 INFO [0] received reconcile message
17259 Sep 22 23:21:37.856 INFO [0] All repairs completed, exit
17260 Sep 22 23:21:37.856 INFO [0] Starts cmd_loop
17261 Sep 22 23:21:37.856 INFO [2] received activate with gen 1
17262 Sep 22 23:21:37.856 INFO [2] client got ds_active_rx, promote! session b6befe75-491e-4821-85c4-60905223c3c2
17263 Sep 22 23:21:37.856 INFO [1] received reconcile message
17264 Sep 22 23:21:37.856 INFO [1] All repairs completed, exit
17265 Sep 22 23:21:37.856 INFO [1] Starts cmd_loop
17266 Sep 22 23:21:37.856 INFO [2] received reconcile message
17267 Sep 22 23:21:37.856 INFO UpstairsConnection { upstairs_id: 91b6bfa9-6f83-4cc0-a735-385db0454268, session_id: b6befe75-491e-4821-85c4-60905223c3c2, gen: 1 } is now active (read-write)
17268 Sep 22 23:21:37.856 INFO [2] All repairs completed, exit
17269 Sep 22 23:21:37.857 INFO [2] Starts cmd_loop
17270 Sep 22 23:21:37.857 INFO UpstairsConnection { upstairs_id: 91b6bfa9-6f83-4cc0-a735-385db0454268, session_id: b6befe75-491e-4821-85c4-60905223c3c2, gen: 1 } is now active (read-write)
17271 The guest has finished waiting for activation
17272 Sep 22 23:21:37.857 DEBG [0] Read AckReady 1004, : downstairs
17273 Sep 22 23:21:37.857 INFO UpstairsConnection { upstairs_id: 91b6bfa9-6f83-4cc0-a735-385db0454268, session_id: b6befe75-491e-4821-85c4-60905223c3c2, gen: 1 } is now active (read-write)
17274 Sep 22 23:21:37.857 DEBG IO Read 1000 has deps []
17275 Sep 22 23:21:37.857 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
17276 Sep 22 23:21:37.857 INFO [2] downstairs client at 127.0.0.1:45589 has UUID 32384725-b4ea-4f5d-a53b-d921af2e648c
17277 Sep 22 23:21:37.857 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
17278 Sep 22 23:21:37.857 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 32384725-b4ea-4f5d-a53b-d921af2e648c, encrypted: true, database_read_version: 1, database_write_version: 1 }
17279 Sep 22 23:21:37.857 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 WaitActive WaitActive WaitActive
17280 Sep 22 23:21:37.857 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
17281 Sep 22 23:21:37.857 INFO [0] downstairs client at 127.0.0.1:34995 has UUID dc9055ff-ffac-4e79-bd57-721a805ccf83
17282 Sep 22 23:21:37.857 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: dc9055ff-ffac-4e79-bd57-721a805ccf83, encrypted: true, database_read_version: 1, database_write_version: 1 }
17283 Sep 22 23:21:37.857 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 WaitActive WaitActive WaitActive
17284 Sep 22 23:21:37.858 INFO [1] downstairs client at 127.0.0.1:45660 has UUID 2e1bc31f-575d-410d-bb06-222733a39c1e
17285 Sep 22 23:21:37.858 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2e1bc31f-575d-410d-bb06-222733a39c1e, encrypted: true, database_read_version: 1, database_write_version: 1 }
17286 Sep 22 23:21:37.858 INFO [1] check deactivate YES
17287 Sep 22 23:21:37.858 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 WaitActive WaitActive WaitActive
17288 Sep 22 23:21:37.858 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) Active Active Active ds_transition to Deactivated
17289 Sep 22 23:21:37.858 INFO [1] Transition from Active to Deactivated
17290 Sep 22 23:21:37.858 INFO [2] check deactivate YES
17291 Sep 22 23:21:37.858 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) Active Deactivated Active ds_transition to Deactivated
17292 Sep 22 23:21:37.858 INFO [2] Transition from Active to Deactivated
17293 Sep 22 23:21:37.858 DEBG [0] deactivate flush 1001 done, : downstairs
17294 Sep 22 23:21:37.858 INFO Current flush_numbers [0..12]: [0, 0]
17295 Sep 22 23:21:37.858 INFO [0] check deactivate YES
17296 Sep 22 23:21:37.858 DEBG Read :1000 deps:[] res:true
17297 Sep 22 23:21:37.858 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (c9717b37-7d6a-40f7-9030-e7e57914f334) Active Deactivated Deactivated ds_transition to Deactivated
17298 Sep 22 23:21:37.858 INFO [0] Transition from Active to Deactivated
17299 Sep 22 23:21:37.858 INFO current number of open files limit 65536 is already the maximum
17300 Sep 22 23:21:37.858 INFO Opened existing region file "/tmp/downstairs-eu4aQhL5/region.json"
17301 Sep 22 23:21:37.858 INFO Database read version 1
17302 Sep 22 23:21:37.858 ERRO 127.0.0.1:44628: proc: [1] client work task ended, Ok(Err([1] exits after deactivation)), so we end too, looper: 1
17303 Sep 22 23:21:37.858 INFO Database write version 1
17304 Sep 22 23:21:37.858 INFO Downstairs has completed Negotiation, task: proc
17305 Sep 22 23:21:37.858 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Gone missing, transition from Deactivated to New
17306 Sep 22 23:21:37.858 INFO deactivate transition checking...
17307 Sep 22 23:21:37.858 INFO deactivate_transition Deactivated NO
17308 Sep 22 23:21:37.858 INFO deactivate_transition New Maybe
17309 Sep 22 23:21:37.858 INFO deactivate_transition Deactivated NO
17310 Sep 22 23:21:37.858 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 connection to 127.0.0.1:44628 closed, looper: 1
17311 Sep 22 23:21:37.858 DEBG Read :1000 deps:[] res:true
17312 Sep 22 23:21:37.858 ERRO 127.0.0.1:38317: proc: [2] client work task ended, Ok(Err([2] exits after deactivation)), so we end too, looper: 2
17313 Sep 22 23:21:37.858 INFO Current flush_numbers [0..12]: [0, 0]
17314 Sep 22 23:21:37.858 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Gone missing, transition from Deactivated to New
17315 Sep 22 23:21:37.858 INFO deactivate transition checking...
17316 Sep 22 23:21:37.858 INFO deactivate_transition Deactivated NO
17317 Sep 22 23:21:37.858 INFO deactivate_transition New Maybe
17318 Sep 22 23:21:37.858 INFO deactivate_transition New Maybe
17319 Sep 22 23:21:37.858 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 connection to 127.0.0.1:38317 closed, looper: 2
17320 Sep 22 23:21:37.858 DEBG up_ds_listen was notified
17321 Sep 22 23:21:37.858 INFO Downstairs has completed Negotiation, task: proc
17322 Sep 22 23:21:37.858 DEBG up_ds_listen process 1001
17323 Sep 22 23:21:37.858 DEBG [A] ack job 1001:2, : downstairs
17324 Sep 22 23:21:37.859 DEBG Read :1000 deps:[] res:true
17325 Sep 22 23:21:37.859 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
17326 Sep 22 23:21:37.859 DEBG up_ds_listen checked 1 jobs, back to waiting
17327 Sep 22 23:21:37.859 INFO Current flush_numbers [0..12]: [0, 0]
17328 Sep 22 23:21:37.859 ERRO 127.0.0.1:65137: proc: [0] client work task ended, Ok(Err([0] exits after deactivation)), so we end too, looper: 0
17329 Sep 22 23:21:37.859 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Gone missing, transition from Deactivated to New
17330 Sep 22 23:21:37.859 INFO deactivate transition checking...
17331 Sep 22 23:21:37.859 INFO deactivate_transition New Maybe
17332 Sep 22 23:21:37.859 INFO deactivate_transition New Maybe
17333 Sep 22 23:21:37.859 INFO deactivate_transition New Maybe
17334 Sep 22 23:21:37.859 INFO All DS in the proper state! -> INIT
17335 Sep 22 23:21:37.859 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 connection to 127.0.0.1:65137 closed, looper: 0
17336 Sep 22 23:21:37.859 INFO Downstairs has completed Negotiation, task: proc
17337 Sep 22 23:21:37.859 INFO [1] 127.0.0.1:44628 task reports connection:false
17338 Sep 22 23:21:37.859 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 New New New
17339 Sep 22 23:21:37.859 INFO [1] 127.0.0.1:44628 task reports offline
17340 Sep 22 23:21:37.859 INFO [2] 127.0.0.1:38317 task reports connection:false
17341 Sep 22 23:21:37.859 INFO [2] 91b6bfa9-6f83-4cc0-a735-385db0454268 (b6befe75-491e-4821-85c4-60905223c3c2) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17342 Sep 22 23:21:37.859 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 New New New
17343 Sep 22 23:21:37.859 INFO [2] Transition from WaitActive to WaitQuorum
17344 Sep 22 23:21:37.859 INFO [2] 127.0.0.1:38317 task reports offline
17345 Sep 22 23:21:37.859 WARN [2] new RM replaced this: None
17346 Sep 22 23:21:37.859 INFO [0] 127.0.0.1:65137 task reports connection:false
17347 Sep 22 23:21:37.859 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 New New New
17348 Sep 22 23:21:37.859 INFO [2] Starts reconcile loop
17349 Sep 22 23:21:37.859 INFO current number of open files limit 65536 is already the maximum
17350 Sep 22 23:21:37.859 INFO [0] 127.0.0.1:65137 task reports offline
17351 Sep 22 23:21:37.859 INFO Opened existing region file "/tmp/downstairs-CuiYNtKt/region.json"
17352 Sep 22 23:21:37.859 INFO Database read version 1
17353 Sep 22 23:21:37.859 INFO Database write version 1
17354 Sep 22 23:21:37.859 INFO [0] 91b6bfa9-6f83-4cc0-a735-385db0454268 (b6befe75-491e-4821-85c4-60905223c3c2) WaitActive WaitActive WaitQuorum ds_transition to WaitQuorum
17355 Sep 22 23:21:37.859 DEBG [2] Read already AckReady 1004, : downstairs
17356 Sep 22 23:21:37.859 INFO [0] Transition from WaitActive to WaitQuorum
17357 Sep 22 23:21:37.859 WARN [0] new RM replaced this: None
17358 Sep 22 23:21:37.859 INFO [0] Starts reconcile loop
17359 Sep 22 23:21:37.859 INFO current number of open files limit 65536 is already the maximum
17360 Sep 22 23:21:37.859 INFO Opened existing region file "/tmp/downstairs-bxfe8IV2/region.json"
17361 Sep 22 23:21:37.859 DEBG [1] Read AckReady 1000, : downstairs
17362 Sep 22 23:21:37.859 INFO Database read version 1
17363 Sep 22 23:21:37.859 INFO [1] 91b6bfa9-6f83-4cc0-a735-385db0454268 (b6befe75-491e-4821-85c4-60905223c3c2) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
17364 Sep 22 23:21:37.859 INFO Database write version 1
17365 Sep 22 23:21:37.859 INFO [1] Transition from WaitActive to WaitQuorum
17366 Sep 22 23:21:37.859 WARN [1] new RM replaced this: None
17367 Sep 22 23:21:37.859 INFO [1] Starts reconcile loop
17368 Sep 22 23:21:37.859 INFO [2] 127.0.0.1:45589 task reports connection:true
17369 Sep 22 23:21:37.859 DEBG [2] Read already AckReady 1000, : downstairs
17370 Sep 22 23:21:37.859 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 WaitQuorum WaitQuorum WaitQuorum
17371 Sep 22 23:21:37.859 INFO [0]R flush_numbers: [0, 0]
17372 Sep 22 23:21:37.860 INFO [0]R generation: [0, 0]
17373 Sep 22 23:21:37.860 INFO [0]R dirty: [false, false]
17374 Sep 22 23:21:37.860 INFO [1]R flush_numbers: [0, 0]
17375 Sep 22 23:21:37.860 INFO [1]R generation: [0, 0]
17376 Sep 22 23:21:37.860 INFO [1]R dirty: [false, false]
17377 Sep 22 23:21:37.860 INFO [2]R flush_numbers: [0, 0]
17378 Sep 22 23:21:37.860 DEBG [0] Read already AckReady 1000, : downstairs
17379 Sep 22 23:21:37.860 INFO [2]R generation: [0, 0]
17380 Sep 22 23:21:37.860 INFO [2]R dirty: [false, false]
17381 Sep 22 23:21:37.860 INFO Max found gen is 1
17382 Sep 22 23:21:37.860 DEBG up_ds_listen was notified
17383 Sep 22 23:21:37.860 INFO Generation requested: 1 >= found:1
17384 Sep 22 23:21:37.860 DEBG up_ds_listen process 1000
17385 Sep 22 23:21:37.860 INFO Next flush: 1
17386 Sep 22 23:21:37.860 DEBG [A] ack job 1000:1, : downstairs
17387 Sep 22 23:21:37.860 INFO All extents match
17388 Sep 22 23:21:37.860 INFO No downstairs repair required
17389 Sep 22 23:21:37.860 INFO No initial repair work was required
17390 Sep 22 23:21:37.860 INFO Set Downstairs and Upstairs active
17391 Sep 22 23:21:37.860 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 is now active with session: b6befe75-491e-4821-85c4-60905223c3c2
17392 Sep 22 23:21:37.860 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 Set Active after no repair
17393 Sep 22 23:21:37.860 INFO Notify all downstairs, region set compare is done.
17394 Sep 22 23:21:37.860 INFO Set check for repair
17395 Sep 22 23:21:37.860 INFO [0] 127.0.0.1:34995 task reports connection:true
17396 Sep 22 23:21:37.860 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 Active Active Active
17397 Sep 22 23:21:37.860 INFO Set check for repair
17398 Sep 22 23:21:37.860 INFO [1] 127.0.0.1:45660 task reports connection:true
17399 Sep 22 23:21:37.860 DEBG up_ds_listen checked 1 jobs, back to waiting
17400 Sep 22 23:21:37.860 INFO UUID: c731e991-4a16-42af-828a-390914aa5a24
17401 Sep 22 23:21:37.860 INFO 91b6bfa9-6f83-4cc0-a735-385db0454268 Active Active Active
17402 Sep 22 23:21:37.860 INFO Blocks per extent:5 Total Extents: 2
17403 Sep 22 23:21:37.860 INFO Set check for repair
17404 Sep 22 23:21:37.860 INFO Crucible Version: Crucible Version: 0.0.1
17405 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17406 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17407 rustc: 1.70.0 stable x86_64-unknown-illumos
17408 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17409 Sep 22 23:21:37.860 INFO [0] received reconcile message
17410 Sep 22 23:21:37.860 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17411 Sep 22 23:21:37.860 INFO [0] All repairs completed, exit
17412 Sep 22 23:21:37.860 INFO Using address: 127.0.0.1:64599, task: main
17413 Sep 22 23:21:37.860 INFO [0] Starts cmd_loop
17414 Sep 22 23:21:37.860 INFO [1] received reconcile message
17415 Sep 22 23:21:37.860 INFO [1] All repairs completed, exit
17416 Sep 22 23:21:37.860 INFO [1] Starts cmd_loop
17417 Sep 22 23:21:37.860 INFO [2] received reconcile message
17418 Sep 22 23:21:37.860 INFO [2] All repairs completed, exit
17419 Sep 22 23:21:37.860 INFO [2] Starts cmd_loop
17420 The guest has finished waiting for activation
17421 Sep 22 23:21:37.861 INFO Repair listens on 127.0.0.1:0, task: repair
17422 Sep 22 23:21:37.861 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52902, task: repair
17423 Sep 22 23:21:37.861 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52902, task: repair
17424 Sep 22 23:21:37.861 INFO listening, local_addr: 127.0.0.1:52902, task: repair
17425 Sep 22 23:21:37.861 DEBG IO Read 1000 has deps []
17426 Sep 22 23:21:37.861 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52902, task: repair
17427 Sep 22 23:21:37.861 INFO Using repair address: 127.0.0.1:52902, task: main
17428 Sep 22 23:21:37.861 INFO No SSL acceptor configured, task: main
17429 Sep 22 23:21:37.861 INFO UUID: 136e8694-ad8f-4812-bbb6-93a5da7b3a3e
17430 Sep 22 23:21:37.861 INFO Blocks per extent:5 Total Extents: 2
17431 Sep 22 23:21:37.861 INFO Crucible Version: Crucible Version: 0.0.1
17432 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17433 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17434 rustc: 1.70.0 stable x86_64-unknown-illumos
17435 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17436 Sep 22 23:21:37.861 INFO current number of open files limit 65536 is already the maximum
17437 Sep 22 23:21:37.861 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17438 Sep 22 23:21:37.861 INFO Using address: 127.0.0.1:54774, task: main
17439 Sep 22 23:21:37.861 INFO Created new region file "/tmp/downstairs-UPdbxzP8/region.json"
17440 Sep 22 23:21:37.862 INFO UUID: f5ea863d-974b-4712-8f07-7666c185979b
17441 Sep 22 23:21:37.862 INFO Blocks per extent:5 Total Extents: 2
17442 Sep 22 23:21:37.862 DEBG [1] Read already AckReady 1004, : downstairs
17443 Sep 22 23:21:37.862 INFO Repair listens on 127.0.0.1:0, task: repair
17444 Sep 22 23:21:37.862 INFO Crucible Version: Crucible Version: 0.0.1
17445 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17446 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17447 rustc: 1.70.0 stable x86_64-unknown-illumos
17448 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17449 Sep 22 23:21:37.862 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17450 Sep 22 23:21:37.862 DEBG up_ds_listen was notified
17451 Sep 22 23:21:37.862 INFO Using address: 127.0.0.1:48404, task: main
17452 Sep 22 23:21:37.862 DEBG Read :1000 deps:[] res:true
17453 Sep 22 23:21:37.862 DEBG up_ds_listen process 1004
17454 Sep 22 23:21:37.862 DEBG [A] ack job 1004:5, : downstairs
17455 Sep 22 23:21:37.862 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51281, task: repair
17456 Sep 22 23:21:37.862 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51281, task: repair
17457 Sep 22 23:21:37.862 INFO listening, local_addr: 127.0.0.1:51281, task: repair
17458 Sep 22 23:21:37.862 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51281, task: repair
17459 Sep 22 23:21:37.862 INFO Repair listens on 127.0.0.1:0, task: repair
17460 Sep 22 23:21:37.862 DEBG Read :1000 deps:[] res:true
17461 Sep 22 23:21:37.862 INFO Using repair address: 127.0.0.1:51281, task: main
17462 Sep 22 23:21:37.862 INFO No SSL acceptor configured, task: main
17463 Sep 22 23:21:37.862 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33274, task: repair
17464 Sep 22 23:21:37.862 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33274, task: repair
17465 Sep 22 23:21:37.862 DEBG up_ds_listen checked 1 jobs, back to waiting
17466 Sep 22 23:21:37.862 INFO listening, local_addr: 127.0.0.1:33274, task: repair
17467 Sep 22 23:21:37.862 INFO current number of open files limit 65536 is already the maximum
17468 Sep 22 23:21:37.863 INFO Created new region file "/tmp/downstairs-HaYagj8S/region.json"
17469 Sep 22 23:21:37.863 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33274, task: repair
17470 Sep 22 23:21:37.863 INFO Using repair address: 127.0.0.1:33274, task: main
17471 Sep 22 23:21:37.863 INFO No SSL acceptor configured, task: main
17472 Sep 22 23:21:37.863 DEBG Read :1000 deps:[] res:true
17473 Sep 22 23:21:37.863 INFO current number of open files limit 65536 is already the maximum
17474 Sep 22 23:21:37.863 INFO Opened existing region file "/tmp/downstairs-RQXy3Y00/region.json"
17475 Sep 22 23:21:37.863 INFO Database read version 1
17476 Sep 22 23:21:37.863 INFO Database write version 1
17477 Sep 22 23:21:37.863 DEBG IO Write 1001 has deps [JobId(1000)]
17478 Sep 22 23:21:37.863 DEBG up_ds_listen was notified
17479 Sep 22 23:21:37.863 DEBG up_ds_listen process 1001
17480 Sep 22 23:21:37.863 DEBG [A] ack job 1001:2, : downstairs
17481 Sep 22 23:21:37.863 DEBG up_ds_listen checked 1 jobs, back to waiting
17482 Sep 22 23:21:37.864 DEBG [2] Read AckReady 1000, : downstairs
17483 Sep 22 23:21:37.864 DEBG [0] Read already AckReady 1000, : downstairs
17484 Sep 22 23:21:37.864 DEBG [1] Read already AckReady 1000, : downstairs
17485 Sep 22 23:21:37.864 DEBG up_ds_listen was notified
17486 Sep 22 23:21:37.864 DEBG up_ds_listen process 1000
17487 Sep 22 23:21:37.864 DEBG [A] ack job 1000:1, : downstairs
17488 Sep 22 23:21:37.865 DEBG up_ds_listen checked 1 jobs, back to waiting
17489 Sep 22 23:21:37.866 INFO UUID: a1447c8e-cf80-4bea-b730-010907ef0c7f
17490 Sep 22 23:21:37.866 DEBG Write :1001 deps:[JobId(1000)] res:true
17491 Sep 22 23:21:37.866 INFO Blocks per extent:5 Total Extents: 2
17492 Sep 22 23:21:37.866 INFO Crucible Version: Crucible Version: 0.0.1
17493 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17494 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17495 rustc: 1.70.0 stable x86_64-unknown-illumos
17496 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17497 Sep 22 23:21:37.866 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17498 Sep 22 23:21:37.866 INFO Using address: 127.0.0.1:54505, task: main
17499 test test::integration_test_scrub_useless ... ok
17500 Sep 22 23:21:37.866 INFO current number of open files limit 65536 is already the maximum
17501 Sep 22 23:21:37.866 INFO Opened existing region file "/tmp/downstairs-UPdbxzP8/region.json"
17502 Sep 22 23:21:37.866 INFO Database read version 1
17503 Sep 22 23:21:37.866 INFO Database write version 1
17504 Sep 22 23:21:37.867 INFO current number of open files limit 65536 is already the maximum
17505 Sep 22 23:21:37.867 INFO Repair listens on 127.0.0.1:0, task: repair
17506 Sep 22 23:21:37.867 INFO Created new region file "/tmp/downstairs-gL8cXuVs/region.json"
17507 Sep 22 23:21:37.867 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36554, task: repair
17508 Sep 22 23:21:37.867 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36554, task: repair
17509 Sep 22 23:21:37.867 DEBG Write :1001 deps:[JobId(1000)] res:true
17510 Sep 22 23:21:37.867 INFO listening, local_addr: 127.0.0.1:36554, task: repair
17511 Sep 22 23:21:37.867 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36554, task: repair
17512 Sep 22 23:21:37.867 INFO Using repair address: 127.0.0.1:36554, task: main
17513 Sep 22 23:21:37.867 INFO No SSL acceptor configured, task: main
17514 Sep 22 23:21:37.867 INFO current number of open files limit 65536 is already the maximum
17515 Sep 22 23:21:37.867 INFO Opened existing region file "/tmp/downstairs-agB7RjDz/region.json"
17516 Sep 22 23:21:37.867 INFO Database read version 1
17517 Sep 22 23:21:37.867 INFO Database write version 1
17518 Sep 22 23:21:37.868 DEBG Write :1001 deps:[JobId(1000)] res:true
17519 Sep 22 23:21:37.868 INFO current number of open files limit 65536 is already the maximum
17520 Sep 22 23:21:37.868 INFO Opened existing region file "/tmp/downstairs-HaYagj8S/region.json"
17521 Sep 22 23:21:37.868 INFO Database read version 1
17522 Sep 22 23:21:37.868 INFO Database write version 1
17523 Sep 22 23:21:37.868 DEBG IO Read 1002 has deps [JobId(1001)]
17524 Sep 22 23:21:37.868 INFO UUID: e615c980-e0be-48be-967d-b06ac35abc56
17525 Sep 22 23:21:37.868 INFO Blocks per extent:5 Total Extents: 2
17526 Sep 22 23:21:37.868 INFO Crucible Version: Crucible Version: 0.0.1
17527 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17528 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17529 rustc: 1.70.0 stable x86_64-unknown-illumos
17530 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17531 Sep 22 23:21:37.868 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17532 Sep 22 23:21:37.868 INFO Using address: 127.0.0.1:62465, task: main
17533 Sep 22 23:21:37.868 INFO Repair listens on 127.0.0.1:0, task: repair
17534 Sep 22 23:21:37.869 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49827, task: repair
17535 Sep 22 23:21:37.869 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49827, task: repair
17536 Sep 22 23:21:37.869 DEBG IO Write 1001 has deps [JobId(1000)]
17537 Sep 22 23:21:37.869 INFO listening, local_addr: 127.0.0.1:49827, task: repair
17538 Sep 22 23:21:37.869 DEBG up_ds_listen was notified
17539 Sep 22 23:21:37.869 DEBG up_ds_listen process 1001
17540 Sep 22 23:21:37.869 DEBG [A] ack job 1001:2, : downstairs
17541 Sep 22 23:21:37.869 DEBG Read :1002 deps:[JobId(1001)] res:true
17542 Sep 22 23:21:37.869 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49827, task: repair
17543 Sep 22 23:21:37.869 DEBG up_ds_listen checked 1 jobs, back to waiting
17544 Sep 22 23:21:37.869 INFO Using repair address: 127.0.0.1:49827, task: main
17545 Sep 22 23:21:37.869 INFO No SSL acceptor configured, task: main
17546 Sep 22 23:21:37.869 DEBG Read :1002 deps:[JobId(1001)] res:true
17547 Sep 22 23:21:37.870 INFO UUID: 1b0c673f-012e-4936-8109-087022151153
17548 Sep 22 23:21:37.870 INFO current number of open files limit 65536 is already the maximum
17549 Sep 22 23:21:37.870 INFO Blocks per extent:5 Total Extents: 2
17550 Sep 22 23:21:37.870 DEBG Read :1002 deps:[JobId(1001)] res:true
17551 Sep 22 23:21:37.870 INFO Crucible Version: Crucible Version: 0.0.1
17552 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17553 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17554 rustc: 1.70.0 stable x86_64-unknown-illumos
17555 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17556 Sep 22 23:21:37.870 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17557 Sep 22 23:21:37.870 INFO Created new region file "/tmp/downstairs-AeRxtKo6/region.json"
17558 Sep 22 23:21:37.870 INFO Using address: 127.0.0.1:38410, task: main
17559 Sep 22 23:21:37.870 INFO Repair listens on 127.0.0.1:0, task: repair
17560 Sep 22 23:21:37.870 INFO current number of open files limit 65536 is already the maximum
17561 Sep 22 23:21:37.870 INFO Opened existing region file "/tmp/downstairs-gL8cXuVs/region.json"
17562 Sep 22 23:21:37.870 INFO Database read version 1
17563 Sep 22 23:21:37.870 INFO Database write version 1
17564 Sep 22 23:21:37.870 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36658, task: repair
17565 Sep 22 23:21:37.870 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36658, task: repair
17566 Sep 22 23:21:37.870 INFO UUID: fee4220d-c596-4fac-bca8-6109f63683be
17567 Sep 22 23:21:37.870 INFO Blocks per extent:5 Total Extents: 2
17568 Sep 22 23:21:37.870 INFO listening, local_addr: 127.0.0.1:36658, task: repair
17569 Sep 22 23:21:37.870 INFO Crucible Version: Crucible Version: 0.0.1
17570 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17571 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17572 rustc: 1.70.0 stable x86_64-unknown-illumos
17573 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17574 Sep 22 23:21:37.870 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17575 Sep 22 23:21:37.871 INFO Using address: 127.0.0.1:34215, task: main
17576 Sep 22 23:21:37.871 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36658, task: repair
17577 Sep 22 23:21:37.871 INFO Using repair address: 127.0.0.1:36658, task: main
17578 Sep 22 23:21:37.871 INFO No SSL acceptor configured, task: main
17579 Sep 22 23:21:37.871 INFO Upstairs starts
17580 Sep 22 23:21:37.871 INFO Crucible Version: BuildInfo {
17581 version: "0.0.1",
17582 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17583 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17584 git_branch: "main",
17585 rustc_semver: "1.70.0",
17586 rustc_channel: "stable",
17587 rustc_host_triple: "x86_64-unknown-illumos",
17588 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17589 cargo_triple: "x86_64-unknown-illumos",
17590 debug: true,
17591 opt_level: 0,
17592 }
17593 Sep 22 23:21:37.871 INFO Repair listens on 127.0.0.1:0, task: repair
17594 Sep 22 23:21:37.871 INFO Upstairs <-> Downstairs Message Version: 4
17595 Sep 22 23:21:37.871 INFO Crucible stats registered with UUID: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5
17596 Sep 22 23:21:37.871 INFO Crucible 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 has session id: 71cfa847-8b3e-4c1a-ab28-9b5cea9dd9a5
17597 Sep 22 23:21:37.871 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49207, task: repair
17598 Sep 22 23:21:37.871 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49207, task: repair
17599 Sep 22 23:21:37.871 INFO listening, local_addr: 127.0.0.1:49207, task: repair
17600 Sep 22 23:21:37.871 WARN upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } disconnected, 0 jobs left, task: main
17601 Sep 22 23:21:37.871 WARN upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } was previously active, clearing, task: main
17602 Sep 22 23:21:37.871 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49207, task: repair
17603 Sep 22 23:21:37.871 INFO connection (127.0.0.1:43896): all done
17604 Sep 22 23:21:37.871 INFO Using repair address: 127.0.0.1:49207, task: main
17605 Sep 22 23:21:37.871 INFO No SSL acceptor configured, task: main
17606 Sep 22 23:21:37.871 WARN upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } disconnected, 0 jobs left, task: main
17607 Sep 22 23:21:37.871 WARN upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } was previously active, clearing, task: main
17608 Sep 22 23:21:37.871 INFO connection (127.0.0.1:47893): all done
17609 Sep 22 23:21:37.871 WARN upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } disconnected, 0 jobs left, task: main
17610 Sep 22 23:21:37.872 WARN upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: c9717b37-7d6a-40f7-9030-e7e57914f334, gen: 1 } was previously active, clearing, task: main
17611 Sep 22 23:21:37.872 INFO connection (127.0.0.1:48011): all done
17612 Sep 22 23:21:37.872 INFO listening on 127.0.0.1:0, task: main
17613 Sep 22 23:21:37.872 INFO listening on 127.0.0.1:0, task: main
17614 Sep 22 23:21:37.872 INFO listening on 127.0.0.1:0, task: main
17615 Sep 22 23:21:37.872 INFO [0] connecting to 127.0.0.1:48404, looper: 0
17616 Sep 22 23:21:37.872 INFO [1] connecting to 127.0.0.1:54505, looper: 1
17617 Sep 22 23:21:37.872 INFO [2] connecting to 127.0.0.1:38410, looper: 2
17618 Sep 22 23:21:37.872 INFO up_listen starts, task: up_listen
17619 Sep 22 23:21:37.872 INFO Wait for all three downstairs to come online
17620 Sep 22 23:21:37.872 INFO Flush timeout: 0.5
17621 Sep 22 23:21:37.872 INFO UUID: f146ddc8-437e-48f6-aa56-398321fa6a78
17622 Sep 22 23:21:37.872 INFO Blocks per extent:5 Total Extents: 2
17623 Sep 22 23:21:37.872 INFO Crucible Version: Crucible Version: 0.0.1
17624 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17625 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17626 rustc: 1.70.0 stable x86_64-unknown-illumos
17627 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17628 Sep 22 23:21:37.872 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17629 Sep 22 23:21:37.872 INFO Using address: 127.0.0.1:65200, task: main
17630 Sep 22 23:21:37.873 INFO Upstairs starts
17631 Sep 22 23:21:37.873 INFO Crucible Version: BuildInfo {
17632 version: "0.0.1",
17633 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17634 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17635 git_branch: "main",
17636 rustc_semver: "1.70.0",
17637 rustc_channel: "stable",
17638 rustc_host_triple: "x86_64-unknown-illumos",
17639 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17640 cargo_triple: "x86_64-unknown-illumos",
17641 debug: true,
17642 opt_level: 0,
17643 }
17644 Sep 22 23:21:37.873 INFO accepted connection from 127.0.0.1:50540, task: main
17645 Sep 22 23:21:37.873 INFO Upstairs <-> Downstairs Message Version: 4
17646 Sep 22 23:21:37.873 INFO Crucible stats registered with UUID: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d
17647 Sep 22 23:21:37.873 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 1
17648 Sep 22 23:21:37.873 INFO Crucible 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d has session id: 9b529882-d0fe-4613-b03a-e8885917f615
17649 Sep 22 23:21:37.873 INFO Repair listens on 127.0.0.1:0, task: repair
17650 Sep 22 23:21:37.873 INFO [1] Proc runs for 127.0.0.1:54505 in state New
17651 Sep 22 23:21:37.873 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 2
17652 Sep 22 23:21:37.873 INFO listening on 127.0.0.1:0, task: main
17653 Sep 22 23:21:37.873 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36525, task: repair
17654 Sep 22 23:21:37.873 INFO [2] Proc runs for 127.0.0.1:38410 in state New
17655 Sep 22 23:21:37.873 INFO listening on 127.0.0.1:0, task: main
17656 Sep 22 23:21:37.873 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36525, task: repair
17657 Sep 22 23:21:37.873 DEBG Write :1001 deps:[JobId(1000)] res:true
17658 Sep 22 23:21:37.873 INFO listening, local_addr: 127.0.0.1:36525, task: repair
17659 Sep 22 23:21:37.873 INFO listening on 127.0.0.1:0, task: main
17660 Sep 22 23:21:37.873 INFO accepted connection from 127.0.0.1:51558, task: main
17661 Sep 22 23:21:37.873 INFO [0] connecting to 127.0.0.1:65399, looper: 0
17662 Sep 22 23:21:37.873 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 0
17663 Sep 22 23:21:37.873 INFO [0] Proc runs for 127.0.0.1:48404 in state New
17664 Sep 22 23:21:37.873 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36525, task: repair
17665 Sep 22 23:21:37.873 INFO Using repair address: 127.0.0.1:36525, task: main
17666 Sep 22 23:21:37.873 INFO No SSL acceptor configured, task: main
17667 Sep 22 23:21:37.873 INFO accepted connection from 127.0.0.1:34528, task: main
17668 Sep 22 23:21:37.873 INFO [1] connecting to 127.0.0.1:54774, looper: 1
17669 Sep 22 23:21:37.873 INFO [2] connecting to 127.0.0.1:34215, looper: 2
17670 Sep 22 23:21:37.873 INFO current number of open files limit 65536 is already the maximum
17671 Sep 22 23:21:37.874 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
17672 Sep 22 23:21:37.874 INFO up_listen starts, task: up_listen
17673 Sep 22 23:21:37.874 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 } connected, version 4, task: proc
17674 Sep 22 23:21:37.874 INFO Wait for all three downstairs to come online
17675 Sep 22 23:21:37.874 INFO Opened existing region file "/tmp/downstairs-AeRxtKo6/region.json"
17676 Sep 22 23:21:37.874 INFO Flush timeout: 0.5
17677 Sep 22 23:21:37.874 INFO Database read version 1
17678 Sep 22 23:21:37.874 INFO Database write version 1
17679 Sep 22 23:21:37.873 INFO current number of open files limit 65536 is already the maximum
17680 Sep 22 23:21:37.874 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
17681 Sep 22 23:21:37.874 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 } connected, version 4, task: proc
17682 Sep 22 23:21:37.874 DEBG Write :1001 deps:[JobId(1000)] res:true
17683 Sep 22 23:21:37.874 INFO Created new region file "/tmp/downstairs-NWKRhaFi/region.json"
17684 Sep 22 23:21:37.874 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
17685 Sep 22 23:21:37.874 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 } connected, version 4, task: proc
17686 Sep 22 23:21:37.874 INFO [2] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d looper connected, looper: 2
17687 Sep 22 23:21:37.874 INFO [2] Proc runs for 127.0.0.1:34215 in state New
17688 Sep 22 23:21:37.874 INFO accepted connection from 127.0.0.1:56564, task: main
17689 The guest has requested activation
17690 Sep 22 23:21:37.874 DEBG [1] Read AckReady 1002, : downstairs
17691 Sep 22 23:21:37.874 INFO accepted connection from 127.0.0.1:61438, task: main
17692 Sep 22 23:21:37.874 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) New New New ds_transition to WaitActive
17693 Sep 22 23:21:37.874 INFO [1] Transition from New to WaitActive
17694 Sep 22 23:21:37.874 INFO accepted connection from 127.0.0.1:62233, task: main
17695 Sep 22 23:21:37.874 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) New WaitActive New ds_transition to WaitActive
17696 Sep 22 23:21:37.874 INFO [0] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d looper connected, looper: 0
17697 Sep 22 23:21:37.874 INFO [2] Transition from New to WaitActive
17698 Sep 22 23:21:37.874 INFO [0] Proc runs for 127.0.0.1:65399 in state New
17699 Sep 22 23:21:37.874 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) New WaitActive WaitActive ds_transition to WaitActive
17700 Sep 22 23:21:37.874 DEBG Write :1001 deps:[JobId(1000)] res:true
17701 Sep 22 23:21:37.874 INFO [1] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d looper connected, looper: 1
17702 Sep 22 23:21:37.874 INFO [0] Transition from New to WaitActive
17703 Sep 22 23:21:37.874 INFO [1] Proc runs for 127.0.0.1:54774 in state New
17704 Sep 22 23:21:37.874 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 active request set
17705 Sep 22 23:21:37.875 INFO [0] received activate with gen 2
17706 Sep 22 23:21:37.875 INFO [0] client got ds_active_rx, promote! session 8442ae1d-103c-45db-9a72-ab1fbfb096dd
17707 Sep 22 23:21:37.875 DEBG IO Read 1002 has deps [JobId(1001)]
17708 Sep 22 23:21:37.875 INFO [1] received activate with gen 2
17709 Sep 22 23:21:37.875 INFO [1] client got ds_active_rx, promote! session 8442ae1d-103c-45db-9a72-ab1fbfb096dd
17710 Sep 22 23:21:37.875 INFO Connection request from 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d with version 4, task: proc
17711 Sep 22 23:21:37.875 INFO upstairs UpstairsConnection { upstairs_id: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d, session_id: 39026538-33df-47fa-a1cb-bac551559eaf, gen: 1 } connected, version 4, task: proc
17712 Sep 22 23:21:37.875 INFO [2] received activate with gen 2
17713 Sep 22 23:21:37.875 INFO [2] client got ds_active_rx, promote! session 8442ae1d-103c-45db-9a72-ab1fbfb096dd
17714 Sep 22 23:21:37.875 INFO Connection request from 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d with version 4, task: proc
17715 Sep 22 23:21:37.875 INFO upstairs UpstairsConnection { upstairs_id: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d, session_id: 39026538-33df-47fa-a1cb-bac551559eaf, gen: 1 } connected, version 4, task: proc
17716 Sep 22 23:21:37.875 INFO Connection request from 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d with version 4, task: proc
17717 Sep 22 23:21:37.875 INFO upstairs UpstairsConnection { upstairs_id: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d, session_id: 39026538-33df-47fa-a1cb-bac551559eaf, gen: 1 } connected, version 4, task: proc
17718 The guest has requested activation
17719 Sep 22 23:21:37.875 INFO [2] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d (39026538-33df-47fa-a1cb-bac551559eaf) New New New ds_transition to WaitActive
17720 Sep 22 23:21:37.875 INFO [2] Transition from New to WaitActive
17721 Sep 22 23:21:37.875 INFO [0] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d (39026538-33df-47fa-a1cb-bac551559eaf) New New WaitActive ds_transition to WaitActive
17722 Sep 22 23:21:37.875 INFO [0] Transition from New to WaitActive
17723 Sep 22 23:21:37.875 DEBG Read :1002 deps:[JobId(1001)] res:true
17724 Sep 22 23:21:37.875 INFO [1] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d (39026538-33df-47fa-a1cb-bac551559eaf) WaitActive New WaitActive ds_transition to WaitActive
17725 Sep 22 23:21:37.875 INFO [1] Transition from New to WaitActive
17726 Sep 22 23:21:37.875 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d active request set
17727 Sep 22 23:21:37.875 INFO [1] downstairs client at 127.0.0.1:54505 has UUID a1447c8e-cf80-4bea-b730-010907ef0c7f
17728 Sep 22 23:21:37.875 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a1447c8e-cf80-4bea-b730-010907ef0c7f, encrypted: true, database_read_version: 1, database_write_version: 1 }
17729 Sep 22 23:21:37.876 INFO [0] received activate with gen 1
17730 Sep 22 23:21:37.876 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
17731 Sep 22 23:21:37.876 INFO [0] client got ds_active_rx, promote! session 39026538-33df-47fa-a1cb-bac551559eaf
17732 Sep 22 23:21:37.876 DEBG Read :1002 deps:[JobId(1001)] res:true
17733 Sep 22 23:21:37.876 INFO UUID: 8e6d336f-2489-4c93-9267-791b6b43d94e
17734 Sep 22 23:21:37.876 INFO Blocks per extent:5 Total Extents: 2
17735 Sep 22 23:21:37.876 INFO [1] received activate with gen 1
17736 Sep 22 23:21:37.876 INFO [2] downstairs client at 127.0.0.1:38410 has UUID 1b0c673f-012e-4936-8109-087022151153
17737 Sep 22 23:21:37.876 INFO [1] client got ds_active_rx, promote! session 39026538-33df-47fa-a1cb-bac551559eaf
17738 Sep 22 23:21:37.876 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1b0c673f-012e-4936-8109-087022151153, encrypted: true, database_read_version: 1, database_write_version: 1 }
17739 Sep 22 23:21:37.876 INFO Crucible Version: Crucible Version: 0.0.1
17740 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17741 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17742 rustc: 1.70.0 stable x86_64-unknown-illumos
17743 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17744 Sep 22 23:21:37.876 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17745 Sep 22 23:21:37.876 INFO Using address: 127.0.0.1:56505, task: main
17746 Sep 22 23:21:37.876 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
17747 Sep 22 23:21:37.876 INFO [2] received activate with gen 1
17748 Sep 22 23:21:37.876 INFO [2] client got ds_active_rx, promote! session 39026538-33df-47fa-a1cb-bac551559eaf
17749 Sep 22 23:21:37.876 INFO [0] downstairs client at 127.0.0.1:48404 has UUID f5ea863d-974b-4712-8f07-7666c185979b
17750 Sep 22 23:21:37.876 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f5ea863d-974b-4712-8f07-7666c185979b, encrypted: true, database_read_version: 1, database_write_version: 1 }
17751 Sep 22 23:21:37.876 DEBG Read :1002 deps:[JobId(1001)] res:true
17752 Sep 22 23:21:37.876 INFO UpstairsConnection { upstairs_id: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d, session_id: 39026538-33df-47fa-a1cb-bac551559eaf, gen: 1 } is now active (read-write)
17753 Sep 22 23:21:37.876 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
17754 Sep 22 23:21:37.876 INFO UpstairsConnection { upstairs_id: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d, session_id: 39026538-33df-47fa-a1cb-bac551559eaf, gen: 1 } is now active (read-write)
17755 Sep 22 23:21:37.876 INFO Repair listens on 127.0.0.1:0, task: repair
17756 Sep 22 23:21:37.876 INFO UpstairsConnection { upstairs_id: 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d, session_id: 39026538-33df-47fa-a1cb-bac551559eaf, gen: 1 } is now active (read-write)
17757 Sep 22 23:21:37.876 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38700, task: repair
17758 Sep 22 23:21:37.876 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38700, task: repair
17759 Sep 22 23:21:37.876 INFO Current flush_numbers [0..12]: [1, 1]
17760 Sep 22 23:21:37.876 INFO listening, local_addr: 127.0.0.1:38700, task: repair
17761 Sep 22 23:21:37.877 INFO Downstairs has completed Negotiation, task: proc
17762 Sep 22 23:21:37.877 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38700, task: repair
17763 Sep 22 23:21:37.877 INFO Using repair address: 127.0.0.1:38700, task: main
17764 Sep 22 23:21:37.877 INFO No SSL acceptor configured, task: main
17765 Sep 22 23:21:37.877 DEBG [2] Read already AckReady 1002, : downstairs
17766 Sep 22 23:21:37.877 INFO [2] downstairs client at 127.0.0.1:34215 has UUID fee4220d-c596-4fac-bca8-6109f63683be
17767 Sep 22 23:21:37.877 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fee4220d-c596-4fac-bca8-6109f63683be, encrypted: true, database_read_version: 1, database_write_version: 1 }
17768 Sep 22 23:21:37.877 INFO Current flush_numbers [0..12]: [1, 1]
17769 Sep 22 23:21:37.877 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d WaitActive WaitActive WaitActive
17770 Sep 22 23:21:37.877 INFO [0] downstairs client at 127.0.0.1:65399 has UUID 70aec45a-414c-484a-9aa8-e75f71202205
17771 Sep 22 23:21:37.877 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 70aec45a-414c-484a-9aa8-e75f71202205, encrypted: true, database_read_version: 1, database_write_version: 1 }
17772 Sep 22 23:21:37.877 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d WaitActive WaitActive WaitActive
17773 Sep 22 23:21:37.877 INFO Downstairs has completed Negotiation, task: proc
17774 Sep 22 23:21:37.877 INFO [1] downstairs client at 127.0.0.1:54774 has UUID 136e8694-ad8f-4812-bbb6-93a5da7b3a3e
17775 Sep 22 23:21:37.877 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 136e8694-ad8f-4812-bbb6-93a5da7b3a3e, encrypted: true, database_read_version: 1, database_write_version: 1 }
17776 Sep 22 23:21:37.877 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d WaitActive WaitActive WaitActive
17777 Sep 22 23:21:37.877 INFO Current flush_numbers [0..12]: [1, 1]
17778 Sep 22 23:21:37.878 INFO Upstairs starts
17779 Sep 22 23:21:37.878 INFO Current flush_numbers [0..12]: [0, 0]
17780 Sep 22 23:21:37.878 INFO Crucible Version: BuildInfo {
17781 version: "0.0.1",
17782 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17783 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17784 git_branch: "main",
17785 rustc_semver: "1.70.0",
17786 rustc_channel: "stable",
17787 rustc_host_triple: "x86_64-unknown-illumos",
17788 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17789 cargo_triple: "x86_64-unknown-illumos",
17790 debug: true,
17791 opt_level: 0,
17792 }
17793 Sep 22 23:21:37.878 INFO Upstairs <-> Downstairs Message Version: 4
17794 Sep 22 23:21:37.878 INFO Crucible stats registered with UUID: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c
17795 Sep 22 23:21:37.878 INFO Crucible a334e2c5-02d5-47e7-a7d4-82e25a58ab9c has session id: b24e6603-575e-47f3-85c3-9a1a62d154a4
17796 Sep 22 23:21:37.878 INFO Downstairs has completed Negotiation, task: proc
17797 Sep 22 23:21:37.878 INFO Downstairs has completed Negotiation, task: proc
17798 Sep 22 23:21:37.878 INFO current number of open files limit 65536 is already the maximum
17799 Sep 22 23:21:37.878 INFO listening on 127.0.0.1:0, task: main
17800 Sep 22 23:21:37.878 INFO Opened existing region file "/tmp/downstairs-NWKRhaFi/region.json"
17801 Sep 22 23:21:37.878 INFO listening on 127.0.0.1:0, task: main
17802 Sep 22 23:21:37.878 INFO Database read version 1
17803 Sep 22 23:21:37.878 INFO listening on 127.0.0.1:0, task: main
17804 Sep 22 23:21:37.878 INFO Database write version 1
17805 Sep 22 23:21:37.878 INFO [0] connecting to 127.0.0.1:64599, looper: 0
17806 Sep 22 23:21:37.878 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17807 Sep 22 23:21:37.878 INFO [1] Transition from WaitActive to WaitQuorum
17808 Sep 22 23:21:37.878 WARN [1] new RM replaced this: None
17809 Sep 22 23:21:37.878 INFO Current flush_numbers [0..12]: [0, 0]
17810 Sep 22 23:21:37.878 INFO [1] Starts reconcile loop
17811 Sep 22 23:21:37.878 INFO [1] connecting to 127.0.0.1:62465, looper: 1
17812 Sep 22 23:21:37.878 INFO [2] connecting to 127.0.0.1:56505, looper: 2
17813 Sep 22 23:21:37.878 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
17814 Sep 22 23:21:37.878 INFO Downstairs has completed Negotiation, task: proc
17815 Sep 22 23:21:37.878 INFO [2] Transition from WaitActive to WaitQuorum
17816 Sep 22 23:21:37.878 INFO up_listen starts, task: up_listen
17817 Sep 22 23:21:37.878 INFO Wait for all three downstairs to come online
17818 Sep 22 23:21:37.878 WARN [2] new RM replaced this: None
17819 Sep 22 23:21:37.878 INFO Flush timeout: 0.5
17820 Sep 22 23:21:37.878 INFO [2] Starts reconcile loop
17821 Sep 22 23:21:37.878 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) WaitActive WaitQuorum WaitQuorum ds_transition to WaitQuorum
17822 Sep 22 23:21:37.878 INFO [0] Transition from WaitActive to WaitQuorum
17823 Sep 22 23:21:37.878 WARN [0] new RM replaced this: None
17824 Sep 22 23:21:37.878 INFO Current flush_numbers [0..12]: [0, 0]
17825 Sep 22 23:21:37.878 INFO accepted connection from 127.0.0.1:44432, task: main
17826 Sep 22 23:21:37.878 INFO [0] Starts reconcile loop
17827 Sep 22 23:21:37.878 INFO [1] 127.0.0.1:54505 task reports connection:true
17828 Sep 22 23:21:37.878 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitQuorum WaitQuorum WaitQuorum
17829 Sep 22 23:21:37.879 INFO [0]R flush_numbers: [1, 1]
17830 Sep 22 23:21:37.879 INFO accepted connection from 127.0.0.1:60279, task: main
17831 Sep 22 23:21:37.879 INFO [0]R generation: [1, 1]
17832 Sep 22 23:21:37.879 INFO Downstairs has completed Negotiation, task: proc
17833 Sep 22 23:21:37.879 INFO [0]R dirty: [false, false]
17834 Sep 22 23:21:37.879 INFO accepted connection from 127.0.0.1:52426, task: main
17835 Sep 22 23:21:37.879 INFO [1]R flush_numbers: [1, 1]
17836 Sep 22 23:21:37.879 INFO [1]R generation: [1, 1]
17837 Sep 22 23:21:37.879 INFO [1]R dirty: [false, false]
17838 Sep 22 23:21:37.879 INFO [2]R flush_numbers: [1, 1]
17839 Sep 22 23:21:37.879 INFO [0] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c looper connected, looper: 0
17840 Sep 22 23:21:37.879 INFO [2]R generation: [1, 1]
17841 Sep 22 23:21:37.879 INFO [2]R dirty: [false, false]
17842 Sep 22 23:21:37.879 INFO [0] Proc runs for 127.0.0.1:64599 in state New
17843 Sep 22 23:21:37.879 INFO Max found gen is 2
17844 Sep 22 23:21:37.879 INFO Generation requested: 2 >= found:2
17845 Sep 22 23:21:37.879 INFO Next flush: 2
17846 Sep 22 23:21:37.879 INFO [1] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c looper connected, looper: 1
17847 Sep 22 23:21:37.879 INFO All extents match
17848 Sep 22 23:21:37.879 INFO [2] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d (39026538-33df-47fa-a1cb-bac551559eaf) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17849 Sep 22 23:21:37.879 INFO No downstairs repair required
17850 Sep 22 23:21:37.879 INFO [2] Transition from WaitActive to WaitQuorum
17851 Sep 22 23:21:37.879 INFO [1] Proc runs for 127.0.0.1:62465 in state New
17852 Sep 22 23:21:37.879 INFO No initial repair work was required
17853 Sep 22 23:21:37.879 WARN [2] new RM replaced this: None
17854 Sep 22 23:21:37.879 INFO Set Downstairs and Upstairs active
17855 Sep 22 23:21:37.879 INFO [2] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c looper connected, looper: 2
17856 Sep 22 23:21:37.879 INFO [2] Starts reconcile loop
17857 Sep 22 23:21:37.879 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 is now active with session: 8442ae1d-103c-45db-9a72-ab1fbfb096dd
17858 Sep 22 23:21:37.879 INFO [2] Proc runs for 127.0.0.1:56505 in state New
17859 Sep 22 23:21:37.879 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Set Active after no repair
17860 Sep 22 23:21:37.879 INFO Notify all downstairs, region set compare is done.
17861 Sep 22 23:21:37.879 INFO Set check for repair
17862 Sep 22 23:21:37.879 INFO [0] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d (39026538-33df-47fa-a1cb-bac551559eaf) WaitActive WaitActive WaitQuorum ds_transition to WaitQuorum
17863 Sep 22 23:21:37.879 INFO [0] Transition from WaitActive to WaitQuorum
17864 Sep 22 23:21:37.879 INFO [2] 127.0.0.1:38410 task reports connection:true
17865 Sep 22 23:21:37.879 WARN [0] new RM replaced this: None
17866 Sep 22 23:21:37.879 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Active Active Active
17867 Sep 22 23:21:37.879 INFO [0] Starts reconcile loop
17868 Sep 22 23:21:37.879 INFO Set check for repair
17869 Sep 22 23:21:37.879 INFO [0] 127.0.0.1:48404 task reports connection:true
17870 Sep 22 23:21:37.879 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Active Active Active
17871 Sep 22 23:21:37.879 INFO [1] 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d (39026538-33df-47fa-a1cb-bac551559eaf) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
17872 Sep 22 23:21:37.879 INFO Set check for repair
17873 Sep 22 23:21:37.879 INFO [1] Transition from WaitActive to WaitQuorum
17874 Sep 22 23:21:37.879 INFO Connection request from a334e2c5-02d5-47e7-a7d4-82e25a58ab9c with version 4, task: proc
17875 Sep 22 23:21:37.879 INFO upstairs UpstairsConnection { upstairs_id: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c, session_id: 71f2b629-5d8b-450e-858e-3d0aaedbefc8, gen: 1 } connected, version 4, task: proc
17876 Sep 22 23:21:37.879 WARN [1] new RM replaced this: None
17877 Sep 22 23:21:37.879 INFO [0] received reconcile message
17878 Sep 22 23:21:37.879 INFO [1] Starts reconcile loop
17879 Sep 22 23:21:37.879 INFO [0] All repairs completed, exit
17880 Sep 22 23:21:37.879 INFO Connection request from a334e2c5-02d5-47e7-a7d4-82e25a58ab9c with version 4, task: proc
17881 Sep 22 23:21:37.879 INFO upstairs UpstairsConnection { upstairs_id: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c, session_id: 71f2b629-5d8b-450e-858e-3d0aaedbefc8, gen: 1 } connected, version 4, task: proc
17882 Sep 22 23:21:37.879 INFO [0] Starts cmd_loop
17883 Sep 22 23:21:37.879 INFO [2] 127.0.0.1:34215 task reports connection:true
17884 Sep 22 23:21:37.879 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d WaitQuorum WaitQuorum WaitQuorum
17885 Sep 22 23:21:37.879 INFO [0]R flush_numbers: [0, 0]
17886 Sep 22 23:21:37.879 INFO Connection request from a334e2c5-02d5-47e7-a7d4-82e25a58ab9c with version 4, task: proc
17887 Sep 22 23:21:37.879 INFO [0]R generation: [0, 0]
17888 Sep 22 23:21:37.879 INFO upstairs UpstairsConnection { upstairs_id: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c, session_id: 71f2b629-5d8b-450e-858e-3d0aaedbefc8, gen: 1 } connected, version 4, task: proc
17889 Sep 22 23:21:37.879 INFO [1] received reconcile message
17890 Sep 22 23:21:37.879 INFO [0]R dirty: [false, false]
17891 Sep 22 23:21:37.879 INFO [1]R flush_numbers: [0, 0]
17892 Sep 22 23:21:37.879 INFO [1]R generation: [0, 0]
17893 Sep 22 23:21:37.879 INFO [1] All repairs completed, exit
17894 Sep 22 23:21:37.879 INFO [1]R dirty: [false, false]
17895 Sep 22 23:21:37.879 INFO [2]R flush_numbers: [0, 0]
17896 Sep 22 23:21:37.879 INFO [1] Starts cmd_loop
17897 Sep 22 23:21:37.879 INFO [2]R generation: [0, 0]
17898 Sep 22 23:21:37.879 INFO [2]R dirty: [false, false]
17899 Sep 22 23:21:37.879 INFO Max found gen is 1
17900 The guest has requested activation
17901 Sep 22 23:21:37.879 INFO Generation requested: 1 >= found:1
17902 Sep 22 23:21:37.879 INFO [2] received reconcile message
17903 Sep 22 23:21:37.879 INFO Next flush: 1
17904 Sep 22 23:21:37.879 INFO [2] All repairs completed, exit
17905 Sep 22 23:21:37.879 INFO All extents match
17906 Sep 22 23:21:37.879 INFO No downstairs repair required
17907 Sep 22 23:21:37.879 INFO [0] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c (71f2b629-5d8b-450e-858e-3d0aaedbefc8) New New New ds_transition to WaitActive
17908 Sep 22 23:21:37.879 INFO [2] Starts cmd_loop
17909 Sep 22 23:21:37.880 INFO [0] Transition from New to WaitActive
17910 Sep 22 23:21:37.880 INFO No initial repair work was required
17911 Sep 22 23:21:37.880 INFO Set Downstairs and Upstairs active
17912 Sep 22 23:21:37.880 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d is now active with session: 39026538-33df-47fa-a1cb-bac551559eaf
17913 Sep 22 23:21:37.880 INFO [1] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c (71f2b629-5d8b-450e-858e-3d0aaedbefc8) WaitActive New New ds_transition to WaitActive
17914 Sep 22 23:21:37.880 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d Set Active after no repair
17915 Sep 22 23:21:37.880 INFO [1] Transition from New to WaitActive
17916 The guest has finished waiting for activation
17917 Sep 22 23:21:37.880 INFO Notify all downstairs, region set compare is done.
17918 Sep 22 23:21:37.880 INFO Set check for repair
17919 Sep 22 23:21:37.880 INFO [2] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c (71f2b629-5d8b-450e-858e-3d0aaedbefc8) WaitActive WaitActive New ds_transition to WaitActive
17920 Sep 22 23:21:37.880 INFO [2] Transition from New to WaitActive
17921 Sep 22 23:21:37.880 INFO [0] 127.0.0.1:65399 task reports connection:true
17922 Sep 22 23:21:37.880 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d Active Active Active
17923 Sep 22 23:21:37.880 DEBG [0] Read already AckReady 1002, : downstairs
17924 Sep 22 23:21:37.880 INFO Set check for repair
17925 Sep 22 23:21:37.880 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c active request set
17926 Sep 22 23:21:37.880 INFO [1] 127.0.0.1:54774 task reports connection:true
17927 Sep 22 23:21:37.880 INFO 1ec1b14d-7169-4b28-b3ed-c4eece97cf0d Active Active Active
17928 Sep 22 23:21:37.880 DEBG up_ds_listen was notified
17929 Sep 22 23:21:37.880 INFO [0] received activate with gen 1
17930 Sep 22 23:21:37.880 INFO [0] client got ds_active_rx, promote! session 71f2b629-5d8b-450e-858e-3d0aaedbefc8
17931 Sep 22 23:21:37.880 INFO Set check for repair
17932 Sep 22 23:21:37.880 DEBG up_ds_listen process 1002
17933 Sep 22 23:21:37.880 DEBG [A] ack job 1002:3, : downstairs
17934 Sep 22 23:21:37.880 INFO [1] received activate with gen 1
17935 Sep 22 23:21:37.880 INFO [0] received reconcile message
17936 Sep 22 23:21:37.880 INFO [1] client got ds_active_rx, promote! session 71f2b629-5d8b-450e-858e-3d0aaedbefc8
17937 Sep 22 23:21:37.880 INFO [0] All repairs completed, exit
17938 Sep 22 23:21:37.880 INFO [2] received activate with gen 1
17939 Sep 22 23:21:37.880 INFO [0] Starts cmd_loop
17940 Sep 22 23:21:37.880 INFO [2] client got ds_active_rx, promote! session 71f2b629-5d8b-450e-858e-3d0aaedbefc8
17941 Sep 22 23:21:37.880 INFO [1] received reconcile message
17942 Sep 22 23:21:37.880 INFO [1] All repairs completed, exit
17943 Sep 22 23:21:37.880 INFO UpstairsConnection { upstairs_id: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c, session_id: 71f2b629-5d8b-450e-858e-3d0aaedbefc8, gen: 1 } is now active (read-write)
17944 Sep 22 23:21:37.880 INFO [1] Starts cmd_loop
17945 Sep 22 23:21:37.880 DEBG [2] Read AckReady 1002, : downstairs
17946 Sep 22 23:21:37.880 DEBG IO Read 1000 has deps []
17947 Sep 22 23:21:37.880 INFO UpstairsConnection { upstairs_id: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c, session_id: 71f2b629-5d8b-450e-858e-3d0aaedbefc8, gen: 1 } is now active (read-write)
17948 Sep 22 23:21:37.880 INFO UUID: 4cf53139-503c-4a75-a5fe-71758d156435
17949 Sep 22 23:21:37.880 INFO [2] received reconcile message
17950 Sep 22 23:21:37.880 INFO Blocks per extent:5 Total Extents: 2
17951 Sep 22 23:21:37.880 INFO [2] All repairs completed, exit
17952 Sep 22 23:21:37.880 INFO [2] Starts cmd_loop
17953 Sep 22 23:21:37.880 INFO UpstairsConnection { upstairs_id: a334e2c5-02d5-47e7-a7d4-82e25a58ab9c, session_id: 71f2b629-5d8b-450e-858e-3d0aaedbefc8, gen: 1 } is now active (read-write)
17954 Sep 22 23:21:37.880 INFO Crucible Version: Crucible Version: 0.0.1
17955 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17956 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17957 rustc: 1.70.0 stable x86_64-unknown-illumos
17958 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17959 Sep 22 23:21:37.880 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17960 Sep 22 23:21:37.880 INFO Using address: 127.0.0.1:34169, task: main
17961 The guest has finished waiting for activation
17962 Sep 22 23:21:37.880 DEBG up_ds_listen checked 1 jobs, back to waiting
17963 Sep 22 23:21:37.881 INFO [0] downstairs client at 127.0.0.1:64599 has UUID c731e991-4a16-42af-828a-390914aa5a24
17964 Sep 22 23:21:37.881 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c731e991-4a16-42af-828a-390914aa5a24, encrypted: true, database_read_version: 1, database_write_version: 1 }
17965 Sep 22 23:21:37.881 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c WaitActive WaitActive WaitActive
17966 Sep 22 23:21:37.881 INFO Repair listens on 127.0.0.1:0, task: repair
17967 Sep 22 23:21:37.881 INFO [1] downstairs client at 127.0.0.1:62465 has UUID e615c980-e0be-48be-967d-b06ac35abc56
17968 Sep 22 23:21:37.881 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e615c980-e0be-48be-967d-b06ac35abc56, encrypted: true, database_read_version: 1, database_write_version: 1 }
17969 Sep 22 23:21:37.881 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c WaitActive WaitActive WaitActive
17970 Sep 22 23:21:37.881 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36924, task: repair
17971 Sep 22 23:21:37.881 INFO [2] downstairs client at 127.0.0.1:56505 has UUID 8e6d336f-2489-4c93-9267-791b6b43d94e
17972 Sep 22 23:21:37.881 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8e6d336f-2489-4c93-9267-791b6b43d94e, encrypted: true, database_read_version: 1, database_write_version: 1 }
17973 Sep 22 23:21:37.881 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36924, task: repair
17974 Sep 22 23:21:37.881 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c WaitActive WaitActive WaitActive
17975 Sep 22 23:21:37.881 INFO listening, local_addr: 127.0.0.1:36924, task: repair
17976 Sep 22 23:21:37.881 DEBG IO Read 1000 has deps []
17977 Sep 22 23:21:37.881 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36924, task: repair
17978 Sep 22 23:21:37.881 INFO Current flush_numbers [0..12]: [0, 0]
17979 Sep 22 23:21:37.881 INFO Using repair address: 127.0.0.1:36924, task: main
17980 Sep 22 23:21:37.881 INFO No SSL acceptor configured, task: main
17981 Sep 22 23:21:37.881 DEBG Read :1000 deps:[] res:true
17982 Sep 22 23:21:37.881 INFO Downstairs has completed Negotiation, task: proc
17983 Sep 22 23:21:37.881 INFO current number of open files limit 65536 is already the maximum
17984 Sep 22 23:21:37.881 INFO Current flush_numbers [0..12]: [0, 0]
17985 Sep 22 23:21:37.881 INFO Created new region file "/tmp/downstairs-gpKNfyva/region.json"
17986 Sep 22 23:21:37.881 INFO Downstairs has completed Negotiation, task: proc
17987 Sep 22 23:21:37.882 INFO Current flush_numbers [0..12]: [0, 0]
17988 Sep 22 23:21:37.882 DEBG Read :1000 deps:[] res:true
17989 Sep 22 23:21:37.882 DEBG Read :1000 deps:[] res:true
17990 Sep 22 23:21:37.882 INFO Downstairs has completed Negotiation, task: proc
17991 Sep 22 23:21:37.882 INFO [0] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c (71f2b629-5d8b-450e-858e-3d0aaedbefc8) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17992 Sep 22 23:21:37.882 INFO [0] Transition from WaitActive to WaitQuorum
17993 Sep 22 23:21:37.882 WARN [0] new RM replaced this: None
17994 Sep 22 23:21:37.882 DEBG Read :1000 deps:[] res:true
17995 Sep 22 23:21:37.882 INFO [0] Starts reconcile loop
17996 Sep 22 23:21:37.882 INFO [1] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c (71f2b629-5d8b-450e-858e-3d0aaedbefc8) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
17997 Sep 22 23:21:37.882 INFO [1] Transition from WaitActive to WaitQuorum
17998 Sep 22 23:21:37.882 WARN [1] new RM replaced this: None
17999 Sep 22 23:21:37.882 INFO [1] Starts reconcile loop
18000 Sep 22 23:21:37.882 INFO [2] a334e2c5-02d5-47e7-a7d4-82e25a58ab9c (71f2b629-5d8b-450e-858e-3d0aaedbefc8) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18001 Sep 22 23:21:37.882 INFO [2] Transition from WaitActive to WaitQuorum
18002 Sep 22 23:21:37.883 WARN [2] new RM replaced this: None
18003 Sep 22 23:21:37.883 INFO [2] Starts reconcile loop
18004 Sep 22 23:21:37.883 DEBG Read :1000 deps:[] res:true
18005 Sep 22 23:21:37.883 INFO [0] 127.0.0.1:64599 task reports connection:true
18006 Sep 22 23:21:37.883 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c WaitQuorum WaitQuorum WaitQuorum
18007 Sep 22 23:21:37.883 DEBG Read :1000 deps:[] res:true
18008 Sep 22 23:21:37.883 INFO [0]R flush_numbers: [0, 0]
18009 Sep 22 23:21:37.883 INFO [0]R generation: [0, 0]
18010 Sep 22 23:21:37.883 INFO [0]R dirty: [false, false]
18011 Sep 22 23:21:37.883 INFO [1]R flush_numbers: [0, 0]
18012 Sep 22 23:21:37.883 INFO [1]R generation: [0, 0]
18013 Sep 22 23:21:37.883 INFO [1]R dirty: [false, false]
18014 Sep 22 23:21:37.883 INFO [2]R flush_numbers: [0, 0]
18015 Sep 22 23:21:37.883 INFO [2]R generation: [0, 0]
18016 Sep 22 23:21:37.883 INFO [2]R dirty: [false, false]
18017 Sep 22 23:21:37.883 INFO Max found gen is 1
18018 Sep 22 23:21:37.883 INFO Generation requested: 1 >= found:1
18019 Sep 22 23:21:37.883 INFO Next flush: 1
18020 Sep 22 23:21:37.883 INFO All extents match
18021 Sep 22 23:21:37.883 INFO No downstairs repair required
18022 Sep 22 23:21:37.883 INFO No initial repair work was required
18023 Sep 22 23:21:37.883 INFO Set Downstairs and Upstairs active
18024 Sep 22 23:21:37.883 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c is now active with session: 71f2b629-5d8b-450e-858e-3d0aaedbefc8
18025 Sep 22 23:21:37.883 DEBG [0] Read already AckReady 1002, : downstairs
18026 Sep 22 23:21:37.883 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c Set Active after no repair
18027 Sep 22 23:21:37.883 INFO Notify all downstairs, region set compare is done.
18028 Sep 22 23:21:37.883 INFO Set check for repair
18029 Sep 22 23:21:37.883 INFO [1] 127.0.0.1:62465 task reports connection:true
18030 Sep 22 23:21:37.883 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c Active Active Active
18031 Sep 22 23:21:37.883 INFO Set check for repair
18032 Sep 22 23:21:37.883 INFO [2] 127.0.0.1:56505 task reports connection:true
18033 Sep 22 23:21:37.883 INFO a334e2c5-02d5-47e7-a7d4-82e25a58ab9c Active Active Active
18034 Sep 22 23:21:37.883 INFO Set check for repair
18035 Sep 22 23:21:37.883 INFO [0] received reconcile message
18036 Sep 22 23:21:37.883 INFO [0] All repairs completed, exit
18037 Sep 22 23:21:37.883 INFO [0] Starts cmd_loop
18038 Sep 22 23:21:37.883 INFO [1] received reconcile message
18039 Sep 22 23:21:37.883 INFO [1] All repairs completed, exit
18040 Sep 22 23:21:37.883 INFO [1] Starts cmd_loop
18041 Sep 22 23:21:37.883 INFO [2] received reconcile message
18042 Sep 22 23:21:37.884 INFO [2] All repairs completed, exit
18043 Sep 22 23:21:37.884 INFO [2] Starts cmd_loop
18044 Sep 22 23:21:37.884 DEBG [2] Read AckReady 1000, : downstairs
18045 The guest has finished waiting for activation
18046 Sep 22 23:21:37.884 DEBG [0] Read already AckReady 1000, : downstairs
18047 Sep 22 23:21:37.884 DEBG IO Read 1000 has deps []
18048 Sep 22 23:21:37.884 DEBG [1] Read already AckReady 1000, : downstairs
18049 Sep 22 23:21:37.884 DEBG up_ds_listen was notified
18050 Sep 22 23:21:37.884 DEBG up_ds_listen process 1000
18051 Sep 22 23:21:37.884 DEBG [A] ack job 1000:1, : downstairs
18052 Sep 22 23:21:37.885 DEBG up_ds_listen checked 1 jobs, back to waiting
18053 Sep 22 23:21:37.885 DEBG [1] Read already AckReady 1002, : downstairs
18054 Sep 22 23:21:37.885 DEBG up_ds_listen was notified
18055 Sep 22 23:21:37.885 DEBG up_ds_listen process 1002
18056 Sep 22 23:21:37.885 DEBG [A] ack job 1002:3, : downstairs
18057 Sep 22 23:21:37.885 DEBG Read :1000 deps:[] res:true
18058 Sep 22 23:21:37.885 DEBG up_ds_listen checked 1 jobs, back to waiting
18059 Sep 22 23:21:37.886 INFO current number of open files limit 65536 is already the maximum
18060 Sep 22 23:21:37.886 INFO Opened existing region file "/tmp/downstairs-gpKNfyva/region.json"
18061 Sep 22 23:21:37.886 INFO Database read version 1
18062 Sep 22 23:21:37.886 INFO Database write version 1
18063 Sep 22 23:21:37.886 DEBG Read :1000 deps:[] res:true
18064 test test::integration_test_three_layers ... ok
18065 Sep 22 23:21:37.886 DEBG Read :1000 deps:[] res:true
18066 Sep 22 23:21:37.886 INFO current number of open files limit 65536 is already the maximum
18067 Sep 22 23:21:37.886 INFO Created new region file "/tmp/downstairs-0tv4fhPa/region.json"
18068 Sep 22 23:21:37.887 INFO UUID: 814ff37e-28f4-46b8-bc55-126a66e61fb7
18069 Sep 22 23:21:37.887 INFO Blocks per extent:5 Total Extents: 2
18070 Sep 22 23:21:37.887 DEBG [1] Read AckReady 1000, : downstairs
18071 Sep 22 23:21:37.887 INFO Crucible Version: Crucible Version: 0.0.1
18072 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18073 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18074 rustc: 1.70.0 stable x86_64-unknown-illumos
18075 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18076 Sep 22 23:21:37.887 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18077 Sep 22 23:21:37.887 INFO Using address: 127.0.0.1:51872, task: main
18078 Sep 22 23:21:37.887 INFO Repair listens on 127.0.0.1:0, task: repair
18079 Sep 22 23:21:37.887 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52092, task: repair
18080 Sep 22 23:21:37.887 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52092, task: repair
18081 Sep 22 23:21:37.887 DEBG [0] Read AckReady 1000, : downstairs
18082 Sep 22 23:21:37.887 INFO listening, local_addr: 127.0.0.1:52092, task: repair
18083 Sep 22 23:21:37.887 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52092, task: repair
18084 Sep 22 23:21:37.887 DEBG [1] Read already AckReady 1000, : downstairs
18085 Sep 22 23:21:37.887 INFO Using repair address: 127.0.0.1:52092, task: main
18086 Sep 22 23:21:37.888 INFO No SSL acceptor configured, task: main
18087 Sep 22 23:21:37.888 DEBG [2] Read already AckReady 1000, : downstairs
18088 Sep 22 23:21:37.888 DEBG up_ds_listen was notified
18089 Sep 22 23:21:37.888 DEBG up_ds_listen process 1000
18090 Sep 22 23:21:37.888 DEBG [A] ack job 1000:1, : downstairs
18091 Sep 22 23:21:37.888 DEBG up_ds_listen checked 1 jobs, back to waiting
18092 Sep 22 23:21:37.888 DEBG IO Write 1001 has deps [JobId(1000)]
18093 Sep 22 23:21:37.889 DEBG [2] Read already AckReady 1000, : downstairs
18094 test test::integration_test_two_layers ... ok
18095 Sep 22 23:21:37.890 INFO Upstairs starts
18096 Sep 22 23:21:37.890 INFO Crucible Version: BuildInfo {
18097 version: "0.0.1",
18098 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18099 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18100 git_branch: "main",
18101 rustc_semver: "1.70.0",
18102 rustc_channel: "stable",
18103 rustc_host_triple: "x86_64-unknown-illumos",
18104 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18105 cargo_triple: "x86_64-unknown-illumos",
18106 debug: true,
18107 opt_level: 0,
18108 }
18109 Sep 22 23:21:37.890 INFO Upstairs <-> Downstairs Message Version: 4
18110 Sep 22 23:21:37.890 INFO Crucible stats registered with UUID: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a
18111 Sep 22 23:21:37.890 INFO Crucible 9062e12f-0869-4fd7-b0b2-a9dea3922b3a has session id: 3b2b1948-9af4-49ad-a2ba-c5c3035b4dc4
18112 Sep 22 23:21:37.890 INFO current number of open files limit 65536 is already the maximum
18113 Sep 22 23:21:37.890 INFO listening on 127.0.0.1:0, task: main
18114 Sep 22 23:21:37.890 INFO Created new region file "/tmp/downstairs-HAffqMEZ/region.json"
18115 Sep 22 23:21:37.890 INFO listening on 127.0.0.1:0, task: main
18116 Sep 22 23:21:37.890 INFO listening on 127.0.0.1:0, task: main
18117 Sep 22 23:21:37.890 INFO [0] connecting to 127.0.0.1:65200, looper: 0
18118 Sep 22 23:21:37.890 INFO [1] connecting to 127.0.0.1:34169, looper: 1
18119 Sep 22 23:21:37.890 INFO [2] connecting to 127.0.0.1:51872, looper: 2
18120 Sep 22 23:21:37.891 INFO current number of open files limit 65536 is already the maximum
18121 Sep 22 23:21:37.891 INFO up_listen starts, task: up_listen
18122 Sep 22 23:21:37.891 INFO Opened existing region file "/tmp/downstairs-0tv4fhPa/region.json"
18123 Sep 22 23:21:37.891 INFO Database read version 1
18124 Sep 22 23:21:37.891 INFO Database write version 1
18125 Sep 22 23:21:37.891 INFO Wait for all three downstairs to come online
18126 Sep 22 23:21:37.891 INFO Flush timeout: 0.5
18127 Sep 22 23:21:37.891 DEBG IO Write 1001 has deps [JobId(1000)]
18128 Sep 22 23:21:37.891 DEBG up_ds_listen was notified
18129 Sep 22 23:21:37.891 DEBG up_ds_listen process 1001
18130 Sep 22 23:21:37.891 INFO [0] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a looper connected, looper: 0
18131 Sep 22 23:21:37.891 DEBG [A] ack job 1001:2, : downstairs
18132 Sep 22 23:21:37.891 INFO [0] Proc runs for 127.0.0.1:65200 in state New
18133 Sep 22 23:21:37.891 DEBG up_ds_listen checked 1 jobs, back to waiting
18134 Sep 22 23:21:37.891 INFO accepted connection from 127.0.0.1:43906, task: main
18135 Sep 22 23:21:37.891 INFO [1] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a looper connected, looper: 1
18136 Sep 22 23:21:37.891 INFO [1] Proc runs for 127.0.0.1:34169 in state New
18137 Sep 22 23:21:37.891 INFO [2] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a looper connected, looper: 2
18138 Sep 22 23:21:37.891 INFO [2] Proc runs for 127.0.0.1:51872 in state New
18139 Sep 22 23:21:37.891 INFO accepted connection from 127.0.0.1:39800, task: main
18140 Sep 22 23:21:37.892 INFO accepted connection from 127.0.0.1:53303, task: main
18141 Sep 22 23:21:37.892 INFO Connection request from 9062e12f-0869-4fd7-b0b2-a9dea3922b3a with version 4, task: proc
18142 Sep 22 23:21:37.892 INFO upstairs UpstairsConnection { upstairs_id: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a, session_id: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf, gen: 1 } connected, version 4, task: proc
18143 Sep 22 23:21:37.892 INFO Connection request from 9062e12f-0869-4fd7-b0b2-a9dea3922b3a with version 4, task: proc
18144 Sep 22 23:21:37.892 DEBG [0] Read already AckReady 1000, : downstairs
18145 Sep 22 23:21:37.892 INFO upstairs UpstairsConnection { upstairs_id: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a, session_id: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf, gen: 1 } connected, version 4, task: proc
18146 Sep 22 23:21:37.892 DEBG up_ds_listen was notified
18147 Sep 22 23:21:37.892 DEBG up_ds_listen process 1000
18148 Sep 22 23:21:37.892 INFO Connection request from 9062e12f-0869-4fd7-b0b2-a9dea3922b3a with version 4, task: proc
18149 Sep 22 23:21:37.892 DEBG [A] ack job 1000:1, : downstairs
18150 Sep 22 23:21:37.892 INFO upstairs UpstairsConnection { upstairs_id: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a, session_id: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf, gen: 1 } connected, version 4, task: proc
18151 The guest has requested activation
18152 Sep 22 23:21:37.892 INFO [0] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a (2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf) New New New ds_transition to WaitActive
18153 Sep 22 23:21:37.892 INFO [0] Transition from New to WaitActive
18154 Sep 22 23:21:37.892 INFO UUID: 08d72c33-1ae3-4984-93cb-e5924e7608cb
18155 Sep 22 23:21:37.892 INFO Blocks per extent:5 Total Extents: 2
18156 Sep 22 23:21:37.892 INFO [1] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a (2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf) WaitActive New New ds_transition to WaitActive
18157 Sep 22 23:21:37.892 INFO [1] Transition from New to WaitActive
18158 Sep 22 23:21:37.892 DEBG up_ds_listen checked 1 jobs, back to waiting
18159 Sep 22 23:21:37.893 INFO Crucible Version: Crucible Version: 0.0.1
18160 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18161 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18162 rustc: 1.70.0 stable x86_64-unknown-illumos
18163 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18164 Sep 22 23:21:37.893 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18165 Sep 22 23:21:37.893 INFO Using address: 127.0.0.1:50727, task: main
18166 Sep 22 23:21:37.893 INFO [2] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a (2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf) WaitActive WaitActive New ds_transition to WaitActive
18167 Sep 22 23:21:37.893 INFO [2] Transition from New to WaitActive
18168 Sep 22 23:21:37.893 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a active request set
18169 Sep 22 23:21:37.893 INFO [0] received activate with gen 1
18170 Sep 22 23:21:37.893 INFO [0] client got ds_active_rx, promote! session 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf
18171 Sep 22 23:21:37.893 INFO [1] received activate with gen 1
18172 Sep 22 23:21:37.893 INFO [1] client got ds_active_rx, promote! session 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf
18173 Sep 22 23:21:37.893 DEBG up_ds_listen was notified
18174 Sep 22 23:21:37.893 INFO Repair listens on 127.0.0.1:0, task: repair
18175 Sep 22 23:21:37.893 DEBG up_ds_listen process 1001
18176 Sep 22 23:21:37.893 DEBG [A] ack job 1001:2, : downstairs
18177 Sep 22 23:21:37.893 DEBG IO Flush 1001 has deps [JobId(1000)]
18178 Sep 22 23:21:37.893 INFO [2] received activate with gen 1
18179 Sep 22 23:21:37.893 DEBG up_ds_listen checked 1 jobs, back to waiting
18180 Sep 22 23:21:37.893 INFO [2] client got ds_active_rx, promote! session 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf
18181 Sep 22 23:21:37.893 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47905, task: repair
18182 Sep 22 23:21:37.893 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47905, task: repair
18183 Sep 22 23:21:37.893 INFO listening, local_addr: 127.0.0.1:47905, task: repair
18184 Sep 22 23:21:37.893 INFO UpstairsConnection { upstairs_id: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a, session_id: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf, gen: 1 } is now active (read-write)
18185 Sep 22 23:21:37.893 DEBG IO Read 1002 has deps [JobId(1001)]
18186 Sep 22 23:21:37.893 INFO UpstairsConnection { upstairs_id: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a, session_id: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf, gen: 1 } is now active (read-write)
18187 Sep 22 23:21:37.893 INFO current number of open files limit 65536 is already the maximum
18188 Sep 22 23:21:37.893 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47905, task: repair
18189 Sep 22 23:21:37.893 INFO Opened existing region file "/tmp/downstairs-HAffqMEZ/region.json"
18190 Sep 22 23:21:37.893 INFO Database read version 1
18191 Sep 22 23:21:37.893 INFO Database write version 1
18192 Sep 22 23:21:37.893 INFO Using repair address: 127.0.0.1:47905, task: main
18193 Sep 22 23:21:37.893 INFO No SSL acceptor configured, task: main
18194 Sep 22 23:21:37.893 INFO UpstairsConnection { upstairs_id: 9062e12f-0869-4fd7-b0b2-a9dea3922b3a, session_id: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf, gen: 1 } is now active (read-write)
18195 Sep 22 23:21:37.894 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
18196 Sep 22 23:21:37.894 INFO current number of open files limit 65536 is already the maximum
18197 Sep 22 23:21:37.894 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
18198 Sep 22 23:21:37.894 INFO Created new region file "/tmp/downstairs-lpBbEHsY/region.json"
18199 Sep 22 23:21:37.894 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
18200 Sep 22 23:21:37.894 INFO [0] downstairs client at 127.0.0.1:65200 has UUID f146ddc8-437e-48f6-aa56-398321fa6a78
18201 Sep 22 23:21:37.894 DEBG Read :1002 deps:[JobId(1001)] res:true
18202 Sep 22 23:21:37.894 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f146ddc8-437e-48f6-aa56-398321fa6a78, encrypted: true, database_read_version: 1, database_write_version: 1 }
18203 Sep 22 23:21:37.894 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a WaitActive WaitActive WaitActive
18204 Sep 22 23:21:37.894 INFO [1] downstairs client at 127.0.0.1:34169 has UUID 4cf53139-503c-4a75-a5fe-71758d156435
18205 Sep 22 23:21:37.894 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4cf53139-503c-4a75-a5fe-71758d156435, encrypted: true, database_read_version: 1, database_write_version: 1 }
18206 Sep 22 23:21:37.894 DEBG Write :1001 deps:[JobId(1000)] res:true
18207 Sep 22 23:21:37.894 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a WaitActive WaitActive WaitActive
18208 Sep 22 23:21:37.894 DEBG up_ds_listen was notified
18209 Sep 22 23:21:37.894 DEBG Read :1002 deps:[JobId(1001)] res:true
18210 Sep 22 23:21:37.894 INFO [2] downstairs client at 127.0.0.1:51872 has UUID 814ff37e-28f4-46b8-bc55-126a66e61fb7
18211 Sep 22 23:21:37.894 DEBG up_ds_listen process 1001
18212 Sep 22 23:21:37.894 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 814ff37e-28f4-46b8-bc55-126a66e61fb7, encrypted: true, database_read_version: 1, database_write_version: 1 }
18213 Sep 22 23:21:37.894 DEBG [A] ack job 1001:2, : downstairs
18214 Sep 22 23:21:37.894 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a WaitActive WaitActive WaitActive
18215 Sep 22 23:21:37.894 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
18216 Sep 22 23:21:37.894 DEBG up_ds_listen checked 1 jobs, back to waiting
18217 Sep 22 23:21:37.894 INFO current number of open files limit 65536 is already the maximum
18218 Sep 22 23:21:37.894 INFO Current flush_numbers [0..12]: [0, 0]
18219 Sep 22 23:21:37.894 DEBG Read :1002 deps:[JobId(1001)] res:true
18220 Sep 22 23:21:37.895 INFO Created new region file "/tmp/downstairs-n0ieH1oY/region.json"
18221 Sep 22 23:21:37.895 INFO Downstairs has completed Negotiation, task: proc
18222 Sep 22 23:21:37.895 DEBG Write :1001 deps:[JobId(1000)] res:true
18223 Sep 22 23:21:37.895 INFO Current flush_numbers [0..12]: [0, 0]
18224 Sep 22 23:21:37.895 INFO Downstairs has completed Negotiation, task: proc
18225 Sep 22 23:21:37.896 INFO Current flush_numbers [0..12]: [0, 0]
18226 Sep 22 23:21:37.896 INFO UUID: d300473c-ae2a-4754-900f-58509b1b9961
18227 Sep 22 23:21:37.896 INFO Blocks per extent:5 Total Extents: 2
18228 Sep 22 23:21:37.896 INFO Crucible Version: Crucible Version: 0.0.1
18229 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18230 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18231 rustc: 1.70.0 stable x86_64-unknown-illumos
18232 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18233 Sep 22 23:21:37.896 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18234 Sep 22 23:21:37.896 INFO Downstairs has completed Negotiation, task: proc
18235 Sep 22 23:21:37.896 INFO Using address: 127.0.0.1:33968, task: main
18236 Sep 22 23:21:37.896 DEBG Write :1001 deps:[JobId(1000)] res:true
18237 Sep 22 23:21:37.896 INFO [0] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a (2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
18238 Sep 22 23:21:37.896 INFO [0] Transition from WaitActive to WaitQuorum
18239 Sep 22 23:21:37.896 WARN [0] new RM replaced this: None
18240 Sep 22 23:21:37.896 INFO [0] Starts reconcile loop
18241 Sep 22 23:21:37.896 INFO [1] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a (2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
18242 Sep 22 23:21:37.896 INFO [1] Transition from WaitActive to WaitQuorum
18243 Sep 22 23:21:37.896 DEBG IO Read 1002 has deps [JobId(1001)]
18244 Sep 22 23:21:37.896 INFO Repair listens on 127.0.0.1:0, task: repair
18245 Sep 22 23:21:37.896 WARN [1] new RM replaced this: None
18246 Sep 22 23:21:37.896 INFO [1] Starts reconcile loop
18247 Sep 22 23:21:37.896 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:34919, task: repair
18248 Sep 22 23:21:37.896 INFO [2] 9062e12f-0869-4fd7-b0b2-a9dea3922b3a (2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18249 Sep 22 23:21:37.896 INFO [2] Transition from WaitActive to WaitQuorum
18250 Sep 22 23:21:37.896 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:34919, task: repair
18251 Sep 22 23:21:37.896 WARN [2] new RM replaced this: None
18252 Sep 22 23:21:37.896 INFO [2] Starts reconcile loop
18253 Sep 22 23:21:37.896 INFO listening, local_addr: 127.0.0.1:34919, task: repair
18254 Sep 22 23:21:37.896 INFO [0] 127.0.0.1:65200 task reports connection:true
18255 Sep 22 23:21:37.896 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a WaitQuorum WaitQuorum WaitQuorum
18256 Sep 22 23:21:37.896 INFO [0]R flush_numbers: [0, 0]
18257 Sep 22 23:21:37.896 INFO [0]R generation: [0, 0]
18258 Sep 22 23:21:37.897 INFO [0]R dirty: [false, false]
18259 Sep 22 23:21:37.897 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:34919, task: repair
18260 Sep 22 23:21:37.897 INFO [1]R flush_numbers: [0, 0]
18261 Sep 22 23:21:37.897 INFO [1]R generation: [0, 0]
18262 Sep 22 23:21:37.897 INFO [1]R dirty: [false, false]
18263 Sep 22 23:21:37.897 INFO Using repair address: 127.0.0.1:34919, task: main
18264 Sep 22 23:21:37.897 INFO [2]R flush_numbers: [0, 0]
18265 Sep 22 23:21:37.897 INFO No SSL acceptor configured, task: main
18266 Sep 22 23:21:37.897 INFO [2]R generation: [0, 0]
18267 Sep 22 23:21:37.897 INFO [2]R dirty: [false, false]
18268 Sep 22 23:21:37.897 INFO Max found gen is 1
18269 Sep 22 23:21:37.897 INFO Generation requested: 1 >= found:1
18270 Sep 22 23:21:37.897 INFO Next flush: 1
18271 Sep 22 23:21:37.897 INFO All extents match
18272 Sep 22 23:21:37.897 INFO No downstairs repair required
18273 Sep 22 23:21:37.897 INFO No initial repair work was required
18274 Sep 22 23:21:37.897 INFO Set Downstairs and Upstairs active
18275 Sep 22 23:21:37.897 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a is now active with session: 2a9cbe7b-00ff-4e9a-a06a-da8422b4d8bf
18276 Sep 22 23:21:37.897 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a Set Active after no repair
18277 Sep 22 23:21:37.897 INFO Notify all downstairs, region set compare is done.
18278 Sep 22 23:21:37.897 INFO Set check for repair
18279 Sep 22 23:21:37.897 INFO [1] 127.0.0.1:34169 task reports connection:true
18280 Sep 22 23:21:37.897 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a Active Active Active
18281 Sep 22 23:21:37.897 INFO current number of open files limit 65536 is already the maximum
18282 Sep 22 23:21:37.897 INFO Set check for repair
18283 Sep 22 23:21:37.897 INFO [2] 127.0.0.1:51872 task reports connection:true
18284 Sep 22 23:21:37.897 INFO 9062e12f-0869-4fd7-b0b2-a9dea3922b3a Active Active Active
18285 Sep 22 23:21:37.897 INFO Created new region file "/tmp/downstairs-KmXVVDA0/region.json"
18286 Sep 22 23:21:37.897 INFO Set check for repair
18287 Sep 22 23:21:37.897 INFO [0] received reconcile message
18288 Sep 22 23:21:37.897 INFO [0] All repairs completed, exit
18289 Sep 22 23:21:37.897 INFO [0] Starts cmd_loop
18290 Sep 22 23:21:37.897 INFO [1] received reconcile message
18291 Sep 22 23:21:37.897 INFO [1] All repairs completed, exit
18292 Sep 22 23:21:37.897 DEBG Read :1002 deps:[JobId(1001)] res:true
18293 Sep 22 23:21:37.897 INFO [1] Starts cmd_loop
18294 Sep 22 23:21:37.897 INFO [2] received reconcile message
18295 Sep 22 23:21:37.897 INFO [2] All repairs completed, exit
18296 Sep 22 23:21:37.897 INFO [2] Starts cmd_loop
18297 Sep 22 23:21:37.897 DEBG [2] Read AckReady 1002, : downstairs
18298 The guest has finished waiting for activation
18299 Sep 22 23:21:37.898 DEBG Read :1002 deps:[JobId(1001)] res:true
18300 Sep 22 23:21:37.898 DEBG IO Read 1000 has deps []
18301 Sep 22 23:21:37.898 INFO current number of open files limit 65536 is already the maximum
18302 Sep 22 23:21:37.898 DEBG Read :1002 deps:[JobId(1001)] res:true
18303 Sep 22 23:21:37.898 INFO Opened existing region file "/tmp/downstairs-lpBbEHsY/region.json"
18304 Sep 22 23:21:37.898 INFO Database read version 1
18305 Sep 22 23:21:37.898 INFO Database write version 1
18306 Sep 22 23:21:37.899 DEBG Read :1000 deps:[] res:true
18307 Sep 22 23:21:37.899 DEBG Read :1000 deps:[] res:true
18308 Sep 22 23:21:37.899 DEBG Read :1000 deps:[] res:true
18309 Sep 22 23:21:37.900 INFO current number of open files limit 65536 is already the maximum
18310 Sep 22 23:21:37.900 INFO Opened existing region file "/tmp/downstairs-n0ieH1oY/region.json"
18311 Sep 22 23:21:37.900 INFO Database read version 1
18312 Sep 22 23:21:37.900 INFO Database write version 1
18313 Sep 22 23:21:37.900 DEBG [0] Read already AckReady 1002, : downstairs
18314 Sep 22 23:21:37.901 DEBG [0] Read AckReady 1000, : downstairs
18315 Sep 22 23:21:37.901 DEBG [1] Read already AckReady 1000, : downstairs
18316 Sep 22 23:21:37.901 INFO UUID: f233f1e5-b968-4e9a-959c-120f95afdd47
18317 Sep 22 23:21:37.901 INFO Blocks per extent:5 Total Extents: 2
18318 Sep 22 23:21:37.901 INFO Crucible Version: Crucible Version: 0.0.1
18319 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18320 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18321 rustc: 1.70.0 stable x86_64-unknown-illumos
18322 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18323 Sep 22 23:21:37.901 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18324 Sep 22 23:21:37.901 INFO Using address: 127.0.0.1:50019, task: main
18325 Sep 22 23:21:37.901 DEBG [2] Read already AckReady 1000, : downstairs
18326 Sep 22 23:21:37.901 DEBG up_ds_listen was notified
18327 Sep 22 23:21:37.901 DEBG up_ds_listen process 1000
18328 Sep 22 23:21:37.901 DEBG [A] ack job 1000:1, : downstairs
18329 Sep 22 23:21:37.901 INFO Repair listens on 127.0.0.1:0, task: repair
18330 Sep 22 23:21:37.901 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38365, task: repair
18331 Sep 22 23:21:37.901 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38365, task: repair
18332 Sep 22 23:21:37.902 INFO listening, local_addr: 127.0.0.1:38365, task: repair
18333 Sep 22 23:21:37.902 DEBG up_ds_listen checked 1 jobs, back to waiting
18334 Sep 22 23:21:37.902 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38365, task: repair
18335 Sep 22 23:21:37.902 INFO Using repair address: 127.0.0.1:38365, task: main
18336 Sep 22 23:21:37.902 INFO No SSL acceptor configured, task: main
18337 Sep 22 23:21:37.902 INFO UUID: e1b44f76-91da-4986-a06d-4f047931e7eb
18338 Sep 22 23:21:37.902 INFO Blocks per extent:5 Total Extents: 2
18339 Sep 22 23:21:37.902 INFO Crucible Version: Crucible Version: 0.0.1
18340 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18341 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18342 rustc: 1.70.0 stable x86_64-unknown-illumos
18343 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18344 Sep 22 23:21:37.902 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18345 Sep 22 23:21:37.902 INFO Using address: 127.0.0.1:51174, task: main
18346 Sep 22 23:21:37.902 INFO current number of open files limit 65536 is already the maximum
18347 Sep 22 23:21:37.902 INFO current number of open files limit 65536 is already the maximum
18348 Sep 22 23:21:37.902 INFO Opened existing region file "/tmp/downstairs-KmXVVDA0/region.json"
18349 Sep 22 23:21:37.902 INFO Database read version 1
18350 Sep 22 23:21:37.902 INFO Database write version 1
18351 Sep 22 23:21:37.902 INFO Created new region file "/tmp/downstairs-hTrUSJqe/region.json"
18352 Sep 22 23:21:37.902 DEBG [0] Read AckReady 1002, : downstairs
18353 Sep 22 23:21:37.902 INFO Repair listens on 127.0.0.1:0, task: repair
18354 Sep 22 23:21:37.902 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40366, task: repair
18355 Sep 22 23:21:37.902 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40366, task: repair
18356 Sep 22 23:21:37.902 INFO listening, local_addr: 127.0.0.1:40366, task: repair
18357 Sep 22 23:21:37.903 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40366, task: repair
18358 Sep 22 23:21:37.903 INFO Using repair address: 127.0.0.1:40366, task: main
18359 Sep 22 23:21:37.903 INFO No SSL acceptor configured, task: main
18360 Sep 22 23:21:37.903 DEBG [1] Read already AckReady 1002, : downstairs
18361 Sep 22 23:21:37.903 DEBG up_ds_listen was notified
18362 Sep 22 23:21:37.903 DEBG up_ds_listen process 1002
18363 Sep 22 23:21:37.903 DEBG [A] ack job 1002:3, : downstairs
18364 Sep 22 23:21:37.903 INFO current number of open files limit 65536 is already the maximum
18365 Sep 22 23:21:37.903 DEBG up_ds_listen checked 1 jobs, back to waiting
18366 Sep 22 23:21:37.903 INFO Created new region file "/tmp/downstairs-QR6s6qBv/region.json"
18367 Sep 22 23:21:37.904 DEBG [1] Read already AckReady 1002, : downstairs
18368 Sep 22 23:21:37.905 INFO UUID: 84d77caf-cd99-479d-b329-65e270f8e6d5
18369 Sep 22 23:21:37.905 INFO Blocks per extent:5 Total Extents: 2
18370 Sep 22 23:21:37.905 INFO Crucible Version: Crucible Version: 0.0.1
18371 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18372 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18373 rustc: 1.70.0 stable x86_64-unknown-illumos
18374 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18375 Sep 22 23:21:37.905 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18376 Sep 22 23:21:37.905 INFO Using address: 127.0.0.1:40280, task: main
18377 Sep 22 23:21:37.905 INFO Repair listens on 127.0.0.1:0, task: repair
18378 Sep 22 23:21:37.905 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41802, task: repair
18379 Sep 22 23:21:37.905 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41802, task: repair
18380 Sep 22 23:21:37.905 INFO listening, local_addr: 127.0.0.1:41802, task: repair
18381 Sep 22 23:21:37.906 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41802, task: repair
18382 Sep 22 23:21:37.906 INFO Using repair address: 127.0.0.1:41802, task: main
18383 Sep 22 23:21:37.906 INFO No SSL acceptor configured, task: main
18384 Sep 22 23:21:37.906 DEBG IO Write 1001 has deps [JobId(1000)]
18385 Sep 22 23:21:37.906 DEBG [2] Read already AckReady 1002, : downstairs
18386 Sep 22 23:21:37.906 INFO current number of open files limit 65536 is already the maximum
18387 Sep 22 23:21:37.906 DEBG up_ds_listen was notified
18388 Sep 22 23:21:37.906 DEBG up_ds_listen process 1002
18389 Sep 22 23:21:37.906 DEBG [A] ack job 1002:3, : downstairs
18390 Sep 22 23:21:37.906 INFO Created new region file "/tmp/downstairs-sbyjS0xN/region.json"
18391 Sep 22 23:21:37.906 DEBG up_ds_listen checked 1 jobs, back to waiting
18392 Sep 22 23:21:37.907 INFO current number of open files limit 65536 is already the maximum
18393 Sep 22 23:21:37.907 INFO Opened existing region file "/tmp/downstairs-hTrUSJqe/region.json"
18394 Sep 22 23:21:37.907 INFO Database read version 1
18395 Sep 22 23:21:37.907 INFO Database write version 1
18396 test test::integration_test_two_layers_parent_smaller ... ok
18397 Sep 22 23:21:37.910 INFO current number of open files limit 65536 is already the maximum
18398 Sep 22 23:21:37.910 INFO Created new region file "/tmp/downstairs-E6VpKmdW/region.json"
18399 Sep 22 23:21:37.910 INFO UUID: 5ad4cc94-cc5d-4745-8f62-94ec4413158f
18400 Sep 22 23:21:37.910 INFO Blocks per extent:5 Total Extents: 2
18401 Sep 22 23:21:37.910 INFO Crucible Version: Crucible Version: 0.0.1
18402 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18403 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18404 rustc: 1.70.0 stable x86_64-unknown-illumos
18405 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18406 Sep 22 23:21:37.910 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18407 Sep 22 23:21:37.910 INFO Using address: 127.0.0.1:35408, task: main
18408 Sep 22 23:21:37.911 INFO Repair listens on 127.0.0.1:0, task: repair
18409 Sep 22 23:21:37.911 INFO current number of open files limit 65536 is already the maximum
18410 Sep 22 23:21:37.911 INFO Opened existing region file "/tmp/downstairs-QR6s6qBv/region.json"
18411 Sep 22 23:21:37.911 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54039, task: repair
18412 Sep 22 23:21:37.911 INFO Database read version 1
18413 Sep 22 23:21:37.911 INFO Database write version 1
18414 Sep 22 23:21:37.911 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54039, task: repair
18415 Sep 22 23:21:37.911 INFO listening, local_addr: 127.0.0.1:54039, task: repair
18416 test test::integration_test_two_layers_parent_smaller_unwritten ... ok
18417 Sep 22 23:21:37.911 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54039, task: repair
18418 Sep 22 23:21:37.911 INFO Using repair address: 127.0.0.1:54039, task: main
18419 Sep 22 23:21:37.911 INFO No SSL acceptor configured, task: main
18420 Sep 22 23:21:37.911 INFO current number of open files limit 65536 is already the maximum
18421 Sep 22 23:21:37.911 INFO Opened existing region file "/tmp/downstairs-sbyjS0xN/region.json"
18422 Sep 22 23:21:37.911 INFO Database read version 1
18423 Sep 22 23:21:37.911 INFO Database write version 1
18424 Sep 22 23:21:37.911 INFO current number of open files limit 65536 is already the maximum
18425 Sep 22 23:21:37.912 INFO Created new region file "/tmp/downstairs-HlXYD8jF/region.json"
184262023-09-22T23:21:37.912ZINFOcrucible: Upstairs starts
184272023-09-22T23:21:37.912ZINFOcrucible: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
184282023-09-22T23:21:37.912ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
18429 {"msg":"The guest has requested activation
18430 Crucible stats registered with UUID: 5890b7ae-9696-4b48-a320-1c616f83f794","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.912523208Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18431 {"msg":"Crucible 5890b7ae-9696-4b48-a320-1c616f83f794 has session id: 697e29ca-b1d6-4e8f-9151-6ed6ab5a7f62","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.912 INFO listening on 127.0.0.1:0, task: main
18432 2023-09-22T23:21:37.91256345Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18433 Sep 22 23:21:37.912 INFO listening on 127.0.0.1:0, task: main
18434 Sep 22 23:21:37.912 INFO listening on 127.0.0.1:0, task: main
184352023-09-22T23:21:37.912ZINFOcrucible: [0] connecting to 127.0.0.1:50727 looper = 0
184362023-09-22T23:21:37.912ZINFOcrucible: [1] connecting to 127.0.0.1:50019 looper = 1
18437 Sep 22 23:21:37.913 INFO UUID: fb1574ce-cb6c-41db-a1fc-e45b9d42517f
18438 {"msg":"Sep 22 23:21:37.913 INFO Blocks per extent:5 Total Extents: 2
18439 [2] connecting to 127.0.0.1:35408","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.91311039Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2"}
18440 Sep 22 23:21:37.913 INFO Crucible Version: Crucible Version: 0.0.1
18441 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18442 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18443 rustc: 1.70.0 stable x86_64-unknown-illumos
18444 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18445 {"msg":"up_listen starts","v":0,"name":"crucible","level":30Sep 22 23:21:37.913 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18446 ,"time":"2023-09-22T23:21:37.913197938Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","Sep 22 23:21:37.913 INFO Using address: 127.0.0.1:59981, task: main
18447 pid":4769,"task":"up_listen"}
184482023-09-22T23:21:37.913ZINFOcrucible: Wait for all three downstairs to come online
184492023-09-22T23:21:37.913ZINFOcrucible: Flush timeout: 0.5
184502023-09-22T23:21:37.913ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 active request set
18451 Sep 22 23:21:37.913 INFO accepted connection from 127.0.0.1:33366, task: main
18452 Sep 22 23:21:37.913 INFO accepted connection from 127.0.0.1:50854, task: main
18453 Sep 22 23:21:37.913 INFO Repair listens on 127.0.0.1:0, task: repair
18454 {"msg":"[0] 5890b7ae-9696-4b48-a320-1c616f83f794 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.91360681Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769Sep 22 23:21:37.913 DEBG up_ds_listen was notified
18455 Sep 22 23:21:37.913 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37110, task: repair
18456 ,"looper":"0"}
18457 {"msg":"[0] Proc runs for 127.0.0.1:50727 in state New"Sep 22 23:21:37.913 DEBG up_ds_listen process 1001
18458 ,"v":Sep 22 23:21:37.913 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37110, task: repair
18459 0,"name":"crucible","level":30Sep 22 23:21:37.913 DEBG [A] ack job 1001:2, : downstairs
18460 ,"time":"2023-09-22T23:21:37.91369746Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18461 Sep 22 23:21:37.913 INFO listening, local_addr: 127.0.0.1:37110, task: repair
18462 {"msg":"[2] 5890b7ae-9696-4b48-a320-1c616f83f794 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.913746902Z"Sep 22 23:21:37.913 DEBG up_ds_listen checked 1 jobs, back to waiting
18463 ,"hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"2"}
18464 {"msg":"[2] Proc runs for 127.0.0.1:35408 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.913797532Z","hostnameSep 22 23:21:37.913 INFO UUID: 1104ac55-f7a8-4cef-b93e-5cab8f0588d7
18465 ":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18466 {"msg":"Sep 22 23:21:37.913 INFO Blocks per extent:5 Total Extents: 2
18467 [1] 5890b7ae-9696-4b48-a320-1c616f83f794 looper connected","v":0,"name":"crucible","level":30Sep 22 23:21:37.913 INFO accepted connection from 127.0.0.1:36315, task: main
18468 ,"time":"2023-09-22T23:21:37.91384397Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"looper":"1"}
184692023-09-22T23:21:37.913ZINFOcrucible: [1] Proc runs for 127.0.0.1:50019 in state New
18470 Sep 22 23:21:37.913 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37110, task: repair
18471 Sep 22 23:21:37.913 INFO Crucible Version: Crucible Version: 0.0.1
18472 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18473 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18474 rustc: 1.70.0 stable x86_64-unknown-illumos
18475 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18476 Sep 22 23:21:37.913 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18477 Sep 22 23:21:37.913 INFO Using repair address: 127.0.0.1:37110, task: main
18478 Sep 22 23:21:37.913 INFO current number of open files limit 65536 is already the maximum
18479 Sep 22 23:21:37.913 INFO No SSL acceptor configured, task: main
18480 Sep 22 23:21:37.913 INFO Using address: 127.0.0.1:36377, task: main
18481 Sep 22 23:21:37.913 INFO Opened existing region file "/tmp/downstairs-E6VpKmdW/region.json"
18482 Sep 22 23:21:37.913 INFO Database read version 1
18483 Sep 22 23:21:37.913 INFO Database write version 1
18484 Sep 22 23:21:37.914 INFO Connection request from 5890b7ae-9696-4b48-a320-1c616f83f794 with version 4, task: proc
18485 Sep 22 23:21:37.914 INFO upstairs UpstairsConnection { upstairs_id: 5890b7ae-9696-4b48-a320-1c616f83f794, session_id: 2ccf0845-edb5-4c6d-8f10-f9f1a2498395, gen: 1 } connected, version 4, task: proc
18486 Sep 22 23:21:37.914 INFO Connection request from 5890b7ae-9696-4b48-a320-1c616f83f794 with version 4, task: proc
18487 Sep 22 23:21:37.914 INFO Repair listens on 127.0.0.1:0, task: repair
18488 Sep 22 23:21:37.914 INFO upstairs UpstairsConnection { upstairs_id: 5890b7ae-9696-4b48-a320-1c616f83f794, session_id: 2ccf0845-edb5-4c6d-8f10-f9f1a2498395, gen: 1 } connected, version 4, task: proc
18489 Sep 22 23:21:37.914 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57088, task: repair
18490 Sep 22 23:21:37.914 INFO Connection request from 5890b7ae-9696-4b48-a320-1c616f83f794 with version 4, task: proc
18491 Sep 22 23:21:37.914 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57088, task: repair
18492 Sep 22 23:21:37.914 INFO upstairs UpstairsConnection { upstairs_id: 5890b7ae-9696-4b48-a320-1c616f83f794, session_id: 2ccf0845-edb5-4c6d-8f10-f9f1a2498395, gen: 1 } connected, version 4, task: proc
18493 Sep 22 23:21:37.914 INFO listening, local_addr: 127.0.0.1:57088, task: repair
18494 Sep 22 23:21:37.914 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57088, task: repair
18495 {"msg":"[0] 5890b7ae-9696-4b48-a320-1c616f83f794 (2ccf0845-edb5-4c6d-8f10-f9f1a2498395) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":Sep 22 23:21:37.914 INFO Using repair address: 127.0.0.1:57088, task: main
18496 30Sep 22 23:21:37.914 INFO No SSL acceptor configured, task: main
18497 ,"time":"2023-09-22T23:21:37.914545378Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18498 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"Sep 22 23:21:37.914 DEBG IO Read 1002 has deps [JobId(1001)]
18499 time":"2023-09-22T23:21:37.914593007Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
185002023-09-22T23:21:37.914ZINFOcrucible: [0] client is_active_req TRUE, promote! session 2ccf0845-edb5-4c6d-8f10-f9f1a2498395
185012023-09-22T23:21:37.914ZINFOcrucible: [2] 5890b7ae-9696-4b48-a320-1c616f83f794 (2ccf0845-edb5-4c6d-8f10-f9f1a2498395) WaitActive New New ds_transition to WaitActive
185022023-09-22T23:21:37.914ZINFOcrucible: [2] Transition from New to WaitActive
185032023-09-22T23:21:37.914ZINFOcrucible: [2] client is_active_req TRUE, promote! session 2ccf0845-edb5-4c6d-8f10-f9f1a2498395
185042023-09-22T23:21:37.914ZINFOcrucible: [1] 5890b7ae-9696-4b48-a320-1c616f83f794 (2ccf0845-edb5-4c6d-8f10-f9f1a2498395) WaitActive New WaitActive ds_transition to WaitActive
185052023-09-22T23:21:37.914ZINFOcrucible: [1] Transition from New to WaitActive
18506 {Sep 22 23:21:37.914 INFO current number of open files limit 65536 is already the maximum
18507 "msg":"[1] client is_active_req TRUE, promote! session 2ccf0845-edb5-4c6d-8f10-f9f1a2498395","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.914847028Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18508 Sep 22 23:21:37.914 INFO Created new region file "/tmp/downstairs-RaXizMDx/region.json"
18509 Sep 22 23:21:37.915 DEBG Read :1002 deps:[JobId(1001)] res:true
185102023-09-22T23:21:37.915ZINFOcrucible: [0] downstairs client at 127.0.0.1:50727 has UUID 08d72c33-1ae3-4984-93cb-e5924e7608cb
185112023-09-22T23:21:37.915ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 08d72c33-1ae3-4984-93cb-e5924e7608cb, encrypted: true, database_read_version: 1, database_write_version: 1 }
185122023-09-22T23:21:37.915ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 WaitActive WaitActive WaitActive
18513 {"msg":"[2] downstairs client at 127.0.0.1:35408 has UUID 5ad4cc94-cc5d-4745-8f62-94ec4413158f","v":0,"name":"crucible","level":30Sep 22 23:21:37.915 INFO Upstairs starts
18514 ,"time":"2023-09-22T23:21:37.915684256Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18515 {"msg":"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5ad4cc94-cc5d-4745-8f62-94ec4413158f, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30Sep 22 23:21:37.915 INFO Crucible Version: BuildInfo {
18516 version: "0.0.1",
18517 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18518 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18519 git_branch: "main",
18520 rustc_semver: "1.70.0",
18521 rustc_channel: "stable",
18522 rustc_host_triple: "x86_64-unknown-illumos",
18523 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18524 cargo_triple: "x86_64-unknown-illumos",
18525 debug: true,
18526 opt_level: 0,
18527 }
18528 ,"time":"2023-09-22T23:21:37.915729142Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18529 {Sep 22 23:21:37.915 INFO Upstairs <-> Downstairs Message Version: 4
18530 "msg":"5890b7ae-9696-4b48-a320-1c616f83f794 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.91576738Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18531 Sep 22 23:21:37.915 INFO Crucible stats registered with UUID: d15c167b-c380-46a4-a953-864fc02495ed
18532 Sep 22 23:21:37.915 INFO Current flush_numbers [0..12]: [0, 0]
18533 {"msg":"[1] downstairs client at 127.0.0.1:50019 has UUID f233f1e5-b968-4e9a-959c-120f95afdd47","v":0,"name":"crucible","level":30,"time":"Sep 22 23:21:37.915 INFO Crucible d15c167b-c380-46a4-a953-864fc02495ed has session id: d5a13612-c3c3-4fc0-8a00-21ce9f97f75d
18534 2023-09-22T23:21:37.915816005Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18535 {"msg":"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f233f1e5-b968-4e9a-959c-120f95afdd47, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30Sep 22 23:21:37.915 DEBG Read :1002 deps:[JobId(1001)] res:true
18536 ,"time":"2023-09-22T23:21:37.915862975Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
185372023-09-22T23:21:37.915ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 WaitActive WaitActive WaitActive
18538 Sep 22 23:21:37.915 INFO Downstairs has completed Negotiation, task: proc
18539 Sep 22 23:21:37.915 INFO listening on 127.0.0.1:0, task: main
18540 Sep 22 23:21:37.916 INFO listening on 127.0.0.1:0, task: main
18541 Sep 22 23:21:37.916 INFO listening on 127.0.0.1:0, task: main
18542 Sep 22 23:21:37.916 INFO [0] connecting to 127.0.0.1:33968, looper: 0
18543 Sep 22 23:21:37.916 INFO Current flush_numbers [0..12]: [0, 0]
18544 Sep 22 23:21:37.916 INFO [1] connecting to 127.0.0.1:40280, looper: 1
18545 Sep 22 23:21:37.916 DEBG Read :1002 deps:[JobId(1001)] res:true
18546 Sep 22 23:21:37.916 INFO Downstairs has completed Negotiation, task: proc
18547 Sep 22 23:21:37.916 INFO [2] connecting to 127.0.0.1:36377, looper: 2
18548 Sep 22 23:21:37.916 INFO up_listen starts, task: up_listen
18549 Sep 22 23:21:37.916 INFO Wait for all three downstairs to come online
18550 Sep 22 23:21:37.916 INFO Flush timeout: 0.5
18551 Sep 22 23:21:37.916 INFO UUID: 80c20ebc-7f9f-4109-87bd-ff9af4ac5180
18552 Sep 22 23:21:37.916 INFO Blocks per extent:5 Total Extents: 2
18553 Sep 22 23:21:37.916 INFO Current flush_numbers [0..12]: [0, 0]
18554 Sep 22 23:21:37.916 INFO Crucible Version: Crucible Version: 0.0.1
18555 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18556 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18557 rustc: 1.70.0 stable x86_64-unknown-illumos
18558 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18559 Sep 22 23:21:37.916 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18560 Sep 22 23:21:37.916 INFO Using address: 127.0.0.1:60187, task: main
18561 Sep 22 23:21:37.916 INFO accepted connection from 127.0.0.1:40391, task: main
18562 Sep 22 23:21:37.916 INFO accepted connection from 127.0.0.1:50010, task: main
18563 Sep 22 23:21:37.916 INFO current number of open files limit 65536 is already the maximum
18564 Sep 22 23:21:37.916 INFO Downstairs has completed Negotiation, task: proc
18565 Sep 22 23:21:37.916 INFO accepted connection from 127.0.0.1:57192, task: main
18566 Sep 22 23:21:37.916 INFO Opened existing region file "/tmp/downstairs-HlXYD8jF/region.json"
18567 Sep 22 23:21:37.916 INFO Database read version 1
18568 Sep 22 23:21:37.916 INFO Database write version 1
18569 Sep 22 23:21:37.916 INFO [0] d15c167b-c380-46a4-a953-864fc02495ed looper connected, looper: 0
18570 Sep 22 23:21:37.916 INFO [0] Proc runs for 127.0.0.1:33968 in state New
18571 Sep 22 23:21:37.917 INFO [1] d15c167b-c380-46a4-a953-864fc02495ed looper connected, looper: 1
18572 Sep 22 23:21:37.917 INFO Repair listens on 127.0.0.1:0, task: repair
18573 {"msg":"Sep 22 23:21:37.917 INFO [1] Proc runs for 127.0.0.1:40280 in state New
18574 [0] 5890b7ae-9696-4b48-a320-1c616f83f794 (2ccf0845-edb5-4c6d-8f10-f9f1a2498395) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.917122802Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18575 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:21:37.917 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50831, task: repair
18576 ,"Sep 22 23:21:37.917 INFO [2] d15c167b-c380-46a4-a953-864fc02495ed looper connected, looper: 2
18577 time":"2023-09-22T23:21:37.917174668Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18578 {"msg":"[0] new RM replaced this: NoneSep 22 23:21:37.917 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50831, task: repair
18579 ","v":0,"name":"crucible","level":40Sep 22 23:21:37.917 INFO [2] Proc runs for 127.0.0.1:36377 in state New
18580 ,"time":"2023-09-22T23:21:37.917224605Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18581 {"msg":"[0] Starts reconcile loop","v":0,"name":"crucible","level":30Sep 22 23:21:37.917 INFO listening, local_addr: 127.0.0.1:50831, task: repair
18582 ,"time":"2023-09-22T23:21:37.917258341Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
185832023-09-22T23:21:37.917ZINFOcrucible: [2] 5890b7ae-9696-4b48-a320-1c616f83f794 (2ccf0845-edb5-4c6d-8f10-f9f1a2498395) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
185842023-09-22T23:21:37.917ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
185852023-09-22T23:21:37.917ZWARNcrucible: [2] new RM replaced this: None
185862023-09-22T23:21:37.917ZINFOcrucible: [2] Starts reconcile loop
18587 {"msg":"[1] 5890b7ae-9696-4b48-a320-1c616f83f794 (2ccf0845-edb5-4c6d-8f10-f9f1a2498395) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum","v":0,"name":"Sep 22 23:21:37.917 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50831, task: repair
18588 crucible","level":30,"time":"2023-09-22T23:21:37.91744913Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
18589 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0Sep 22 23:21:37.917 INFO Using repair address: 127.0.0.1:50831, task: main
18590 ,"name":"crucible","level":30Sep 22 23:21:37.917 INFO No SSL acceptor configured, task: main
18591 ,"time":"2023-09-22T23:21:37.917491801Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
185922023-09-22T23:21:37.917ZWARNcrucible: [1] new RM replaced this: None
185932023-09-22T23:21:37.917ZINFOcrucible: [1] Starts reconcile loop
185942023-09-22T23:21:37.917ZINFOcrucible: [0] 127.0.0.1:50727 task reports connection:true
185952023-09-22T23:21:37.917ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 WaitQuorum WaitQuorum WaitQuorum
185962023-09-22T23:21:37.917ZINFOcrucible: [0]R flush_numbers: [0, 0]
185972023-09-22T23:21:37.917ZINFOcrucible: [0]R generation: [0, 0]
18598 {"msg":"[0]R dirty: [false, false]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:21:37.917783606Z","The guest has finished waiting for activation
18599 hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
186002023-09-22T23:21:37.917ZINFOcrucible: [1]R flush_numbers: [0, 0]
186012023-09-22T23:21:37.917ZINFOcrucible: [1]R generation: [0, 0]
186022023-09-22T23:21:37.917ZINFOcrucible: [1]R dirty: [false, false]
186032023-09-22T23:21:37.917ZINFOcrucible: [2]R flush_numbers: [0, 0]
186042023-09-22T23:21:37.917ZINFOcrucible: [2]R generation: [0, 0]
186052023-09-22T23:21:37.917ZINFOcrucible: [2]R dirty: [false, false]
186062023-09-22T23:21:37.918ZINFOcrucible: Max found gen is 1
18607 {"msg":"Generation requested: 1 >= found:1","v":0,"name":"crucible","level":30Sep 22 23:21:37.918 INFO current number of open files limit 65536 is already the maximum
18608 ,"time":"2023-09-22T23:21:37.918042411Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769}
186092023-09-22T23:21:37.918ZINFOcrucible: Next flush: 1
186102023-09-22T23:21:37.918ZINFOcrucible: All extents match
186112023-09-22T23:21:37.918ZINFOcrucible: No downstairs repair required
18612 Sep 22 23:21:37.918 INFO Created new region file "/tmp/downstairs-gtLUOPnz/region.json"
186132023-09-22T23:21:37.918ZINFOcrucible: No initial repair work was required
186142023-09-22T23:21:37.918ZINFOcrucible: Set Downstairs and Upstairs active
186152023-09-22T23:21:37.918ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 is now active with session: 2ccf0845-edb5-4c6d-8f10-f9f1a2498395
186162023-09-22T23:21:37.918ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 Set Active after no repair
186172023-09-22T23:21:37.918ZINFOcrucible: Notify all downstairs, region set compare is done.
186182023-09-22T23:21:37.918ZINFOcrucible: Set check for repair
186192023-09-22T23:21:37.918ZINFOcrucible: [2] 127.0.0.1:35408 task reports connection:true
186202023-09-22T23:21:37.918ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 Active Active Active
186212023-09-22T23:21:37.918ZINFOcrucible: Set check for repair
186222023-09-22T23:21:37.918ZINFOcrucible: [1] 127.0.0.1:50019 task reports connection:true
186232023-09-22T23:21:37.918ZINFOcrucible: 5890b7ae-9696-4b48-a320-1c616f83f794 Active Active Active
186242023-09-22T23:21:37.918ZINFOcrucible: Set check for repair
186252023-09-22T23:21:37.918ZINFOcrucible: [0] received reconcile message
186262023-09-22T23:21:37.918ZINFOcrucible: [0] All repairs completed, exit
186272023-09-22T23:21:37.918ZINFOcrucible: [0] Starts cmd_loop
186282023-09-22T23:21:37.918ZINFOcrucible: [1] received reconcile message
186292023-09-22T23:21:37.918ZINFOcrucible: [1] All repairs completed, exit
186302023-09-22T23:21:37.918ZINFOcrucible: [1] Starts cmd_loop
186312023-09-22T23:21:37.918ZINFOcrucible: [2] received reconcile message
186322023-09-22T23:21:37.918ZINFOcrucible: [2] All repairs completed, exit
186332023-09-22T23:21:37.918ZINFOcrucible: [2] Starts cmd_loop
18634 Sep 22 23:21:37.919 INFO UUID: b708efc7-8172-4645-a89a-1dd5a6e86ff4
18635 Sep 22 23:21:37.919 INFO Blocks per extent:5 Total Extents: 2
18636 Sep 22 23:21:37.919 INFO Crucible Version: Crucible Version: 0.0.1
18637 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18638 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18639 rustc: 1.70.0 stable x86_64-unknown-illumos
18640 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18641 Sep 22 23:21:37.919 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18642 Sep 22 23:21:37.919 INFO Using address: 127.0.0.1:41387, task: main
18643 Sep 22 23:21:37.920 INFO Repair listens on 127.0.0.1:0, task: repair
18644 Sep 22 23:21:37.920 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48043, task: repair
18645 Sep 22 23:21:37.920 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48043, task: repair
18646 Sep 22 23:21:37.920 INFO listening, local_addr: 127.0.0.1:48043, task: repair
18647 Sep 22 23:21:37.920 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48043, task: repair
18648 Sep 22 23:21:37.920 INFO Using repair address: 127.0.0.1:48043, task: main
18649 Sep 22 23:21:37.920 INFO No SSL acceptor configured, task: main
18650 Sep 22 23:21:37.920 DEBG [0] Read AckReady 1002, : downstairs
18651 Sep 22 23:21:37.920 INFO current number of open files limit 65536 is already the maximum
18652 Sep 22 23:21:37.920 INFO Opened existing region file "/tmp/downstairs-RaXizMDx/region.json"
18653 Sep 22 23:21:37.921 INFO Database read version 1
18654 Sep 22 23:21:37.921 INFO Database write version 1
18655 Sep 22 23:21:37.922 INFO current number of open files limit 65536 is already the maximum
18656 Sep 22 23:21:37.922 INFO Created new region file "/tmp/downstairs-vbQXcM4a/region.json"
18657 Sep 22 23:21:37.922 DEBG [1] Read already AckReady 1002, : downstairs
18658 Sep 22 23:21:37.922 INFO UUID: ab64c062-c03c-4bae-8346-acfa3ee4c8e7
18659 Sep 22 23:21:37.922 INFO Blocks per extent:5 Total Extents: 2
18660 Sep 22 23:21:37.922 INFO Crucible Version: Crucible Version: 0.0.1
18661 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18662 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18663 rustc: 1.70.0 stable x86_64-unknown-illumos
18664 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18665 Sep 22 23:21:37.923 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18666 Sep 22 23:21:37.923 INFO Using address: 127.0.0.1:40994, task: main
18667 Sep 22 23:21:37.923 INFO Repair listens on 127.0.0.1:0, task: repair
18668 Sep 22 23:21:37.923 INFO current number of open files limit 65536 is already the maximum
18669 Sep 22 23:21:37.923 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33303, task: repair
18670 Sep 22 23:21:37.923 INFO Opened existing region file "/tmp/downstairs-gtLUOPnz/region.json"
18671 Sep 22 23:21:37.923 INFO Database read version 1
18672 Sep 22 23:21:37.923 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33303, task: repair
18673 Sep 22 23:21:37.923 INFO Database write version 1
18674 Sep 22 23:21:37.923 INFO listening, local_addr: 127.0.0.1:33303, task: repair
18675 Sep 22 23:21:37.923 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33303, task: repair
18676 Sep 22 23:21:37.923 INFO Using repair address: 127.0.0.1:33303, task: main
18677 Sep 22 23:21:37.923 INFO No SSL acceptor configured, task: main
18678 Sep 22 23:21:37.924 INFO Upstairs starts
18679 Sep 22 23:21:37.924 INFO Crucible Version: BuildInfo {
18680 version: "0.0.1",
18681 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18682 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18683 git_branch: "main",
18684 rustc_semver: "1.70.0",
18685 rustc_channel: "stable",
18686 rustc_host_triple: "x86_64-unknown-illumos",
18687 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18688 cargo_triple: "x86_64-unknown-illumos",
18689 debug: true,
18690 opt_level: 0,
18691 }
18692 Sep 22 23:21:37.924 INFO Upstairs <-> Downstairs Message Version: 4
18693 Sep 22 23:21:37.924 INFO Crucible stats registered with UUID: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c
18694 Sep 22 23:21:37.924 INFO Crucible 517f6bf3-86c2-4511-bc4e-8840c2dfa85c has session id: 60473939-7206-48a5-b1d3-23c365304810
18695 Sep 22 23:21:37.924 INFO listening on 127.0.0.1:0, task: main
18696 Sep 22 23:21:37.924 INFO listening on 127.0.0.1:0, task: main
18697 Sep 22 23:21:37.925 INFO listening on 127.0.0.1:0, task: main
18698 Sep 22 23:21:37.925 INFO [0] connecting to 127.0.0.1:51174, looper: 0
18699 Sep 22 23:21:37.925 DEBG [2] Read already AckReady 1002, : downstairs
18700 Sep 22 23:21:37.925 INFO [1] connecting to 127.0.0.1:59981, looper: 1
18701 Sep 22 23:21:37.925 INFO [2] connecting to 127.0.0.1:40994, looper: 2
18702 Sep 22 23:21:37.925 DEBG up_ds_listen was notified
18703 Sep 22 23:21:37.925 INFO up_listen starts, task: up_listen
18704 Sep 22 23:21:37.925 INFO Wait for all three downstairs to come online
18705 Sep 22 23:21:37.925 INFO Flush timeout: 0.5
18706 Sep 22 23:21:37.925 DEBG up_ds_listen process 1002
18707 Sep 22 23:21:37.925 DEBG [A] ack job 1002:3, : downstairs
18708 Sep 22 23:21:37.925 INFO accepted connection from 127.0.0.1:56403, task: main
18709 Sep 22 23:21:37.925 INFO accepted connection from 127.0.0.1:57221, task: main
18710 Sep 22 23:21:37.925 INFO accepted connection from 127.0.0.1:38511, task: main
18711 Sep 22 23:21:37.925 INFO [0] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c looper connected, looper: 0
18712 Sep 22 23:21:37.925 INFO [0] Proc runs for 127.0.0.1:51174 in state New
18713 Sep 22 23:21:37.925 INFO [1] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c looper connected, looper: 1
18714 Sep 22 23:21:37.925 INFO [1] Proc runs for 127.0.0.1:59981 in state New
18715 Sep 22 23:21:37.925 DEBG up_ds_listen checked 1 jobs, back to waiting
18716 Sep 22 23:21:37.925 INFO [2] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c looper connected, looper: 2
18717 Sep 22 23:21:37.925 INFO [2] Proc runs for 127.0.0.1:40994 in state New
18718 Sep 22 23:21:37.926 INFO Upstairs starts
18719 Sep 22 23:21:37.926 INFO Crucible Version: BuildInfo {
18720 version: "0.0.1",
18721 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18722 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18723 git_branch: "main",
18724 rustc_semver: "1.70.0",
18725 rustc_channel: "stable",
18726 rustc_host_triple: "x86_64-unknown-illumos",
18727 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18728 cargo_triple: "x86_64-unknown-illumos",
18729 debug: true,
18730 opt_level: 0,
18731 }
18732 Sep 22 23:21:37.926 INFO Upstairs <-> Downstairs Message Version: 4
18733 Sep 22 23:21:37.926 INFO Crucible stats registered with UUID: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5
18734 Sep 22 23:21:37.926 INFO Crucible 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 has session id: 8053272a-0e59-472a-830e-9cb2148ea8b6
18735 Sep 22 23:21:37.926 INFO Connection request from 517f6bf3-86c2-4511-bc4e-8840c2dfa85c with version 4, task: proc
18736 Sep 22 23:21:37.926 INFO UUID: 6b1a9c06-348a-42ce-a59e-2b95a9617acf
18737 Sep 22 23:21:37.926 INFO upstairs UpstairsConnection { upstairs_id: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c, session_id: 9a97b4b8-4148-45e4-bbb1-302708e60500, gen: 3 } connected, version 4, task: proc
18738 Sep 22 23:21:37.926 INFO Blocks per extent:5 Total Extents: 2
18739 Sep 22 23:21:37.926 INFO current number of open files limit 65536 is already the maximum
18740 Sep 22 23:21:37.926 INFO Connection request from 517f6bf3-86c2-4511-bc4e-8840c2dfa85c with version 4, task: proc
18741 Sep 22 23:21:37.926 INFO Opened existing region file "/tmp/downstairs-vbQXcM4a/region.json"
18742 Sep 22 23:21:37.926 INFO Crucible Version: Crucible Version: 0.0.1
18743 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18744 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18745 rustc: 1.70.0 stable x86_64-unknown-illumos
18746 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18747 Sep 22 23:21:37.926 INFO Database read version 1
18748 Sep 22 23:21:37.926 INFO upstairs UpstairsConnection { upstairs_id: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c, session_id: 9a97b4b8-4148-45e4-bbb1-302708e60500, gen: 3 } connected, version 4, task: proc
18749 Sep 22 23:21:37.926 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18750 Sep 22 23:21:37.926 INFO Database write version 1
18751 Sep 22 23:21:37.926 INFO Using address: 127.0.0.1:64895, task: main
18752 Sep 22 23:21:37.926 INFO Connection request from 517f6bf3-86c2-4511-bc4e-8840c2dfa85c with version 4, task: proc
18753 Sep 22 23:21:37.926 INFO upstairs UpstairsConnection { upstairs_id: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c, session_id: 9a97b4b8-4148-45e4-bbb1-302708e60500, gen: 3 } connected, version 4, task: proc
18754 Sep 22 23:21:37.926 INFO [0] connecting to 127.0.0.1:48404, looper: 0
18755 Sep 22 23:21:37.926 INFO [1] connecting to 127.0.0.1:54505, looper: 1
18756 Sep 22 23:21:37.927 INFO [2] connecting to 127.0.0.1:38410, looper: 2
18757 Sep 22 23:21:37.927 INFO Repair listens on 127.0.0.1:0, task: repair
18758 Sep 22 23:21:37.927 INFO up_listen starts, task: up_listen
18759 Sep 22 23:21:37.927 INFO Wait for all three downstairs to come online
18760 Sep 22 23:21:37.927 INFO Flush timeout: 0.5
18761 Sep 22 23:21:37.927 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41794, task: repair
18762 Sep 22 23:21:37.927 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41794, task: repair
18763 Sep 22 23:21:37.927 INFO listening, local_addr: 127.0.0.1:41794, task: repair
18764 Sep 22 23:21:37.927 INFO accepted connection from 127.0.0.1:64681, task: main
18765 Sep 22 23:21:37.927 INFO accepted connection from 127.0.0.1:48744, task: main
18766 Sep 22 23:21:37.927 INFO accepted connection from 127.0.0.1:44304, task: main
18767 Sep 22 23:21:37.927 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41794, task: repair
18768 Sep 22 23:21:37.927 INFO Using repair address: 127.0.0.1:41794, task: main
18769 Sep 22 23:21:37.927 INFO No SSL acceptor configured, task: main
18770 Sep 22 23:21:37.927 INFO [0] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c (9a97b4b8-4148-45e4-bbb1-302708e60500) New New New ds_transition to WaitActive
18771 Sep 22 23:21:37.927 INFO [0] Transition from New to WaitActive
18772 Sep 22 23:21:37.927 INFO [1] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c (9a97b4b8-4148-45e4-bbb1-302708e60500) WaitActive New New ds_transition to WaitActive
18773 Sep 22 23:21:37.927 INFO [1] Transition from New to WaitActive
18774 Sep 22 23:21:37.927 INFO [2] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c (9a97b4b8-4148-45e4-bbb1-302708e60500) WaitActive WaitActive New ds_transition to WaitActive
18775 Sep 22 23:21:37.927 INFO [2] Transition from New to WaitActive
18776 Sep 22 23:21:37.927 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 0
18777 Sep 22 23:21:37.927 INFO [0] Proc runs for 127.0.0.1:48404 in state New
18778 Sep 22 23:21:37.927 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 1
18779 Sep 22 23:21:37.927 INFO [1] Proc runs for 127.0.0.1:54505 in state New
18780 Sep 22 23:21:37.927 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 looper connected, looper: 2
18781 Sep 22 23:21:37.927 INFO [2] Proc runs for 127.0.0.1:38410 in state New
18782 Sep 22 23:21:37.927 INFO current number of open files limit 65536 is already the maximum
18783 Sep 22 23:21:37.928 INFO Created new region file "/tmp/downstairs-TcKuMYwD/region.json"
18784 Sep 22 23:21:37.928 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
18785 Sep 22 23:21:37.928 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } connected, version 4, task: proc
18786 Sep 22 23:21:37.928 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
18787 Sep 22 23:21:37.928 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } connected, version 4, task: proc
18788 Sep 22 23:21:37.928 INFO Connection request from 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 with version 4, task: proc
18789 Sep 22 23:21:37.928 INFO upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } connected, version 4, task: proc
18790 Sep 22 23:21:37.928 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (2e580a9f-7e54-44b6-83db-09df314d2c01) New New New ds_transition to WaitActive
18791 Sep 22 23:21:37.928 INFO [0] Transition from New to WaitActive
18792 Sep 22 23:21:37.928 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (2e580a9f-7e54-44b6-83db-09df314d2c01) WaitActive New New ds_transition to WaitActive
18793 Sep 22 23:21:37.928 INFO [1] Transition from New to WaitActive
18794 Sep 22 23:21:37.928 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (2e580a9f-7e54-44b6-83db-09df314d2c01) WaitActive WaitActive New ds_transition to WaitActive
18795 Sep 22 23:21:37.928 INFO [2] Transition from New to WaitActive
18796 The guest has requested activation
18797 Sep 22 23:21:37.928 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c active request set
18798 Sep 22 23:21:37.929 INFO [0] received activate with gen 3
18799 Sep 22 23:21:37.929 INFO [0] client got ds_active_rx, promote! session 9a97b4b8-4148-45e4-bbb1-302708e60500
18800 Sep 22 23:21:37.929 INFO [1] received activate with gen 3
18801 Sep 22 23:21:37.929 INFO [1] client got ds_active_rx, promote! session 9a97b4b8-4148-45e4-bbb1-302708e60500
18802 Sep 22 23:21:37.929 INFO [2] received activate with gen 3
18803 Sep 22 23:21:37.929 INFO [2] client got ds_active_rx, promote! session 9a97b4b8-4148-45e4-bbb1-302708e60500
18804 Sep 22 23:21:37.929 INFO UpstairsConnection { upstairs_id: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c, session_id: 9a97b4b8-4148-45e4-bbb1-302708e60500, gen: 3 } is now active (read-write)
18805 Sep 22 23:21:37.929 INFO UpstairsConnection { upstairs_id: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c, session_id: 9a97b4b8-4148-45e4-bbb1-302708e60500, gen: 3 } is now active (read-write)
18806 Sep 22 23:21:37.929 INFO UpstairsConnection { upstairs_id: 517f6bf3-86c2-4511-bc4e-8840c2dfa85c, session_id: 9a97b4b8-4148-45e4-bbb1-302708e60500, gen: 3 } is now active (read-write)
18807 Sep 22 23:21:37.929 INFO UUID: 2dedfda5-97c1-427a-8b74-5c08fb2c233b
18808 Sep 22 23:21:37.929 INFO Blocks per extent:5 Total Extents: 2
18809 Sep 22 23:21:37.929 INFO [0] downstairs client at 127.0.0.1:51174 has UUID e1b44f76-91da-4986-a06d-4f047931e7eb
18810 Sep 22 23:21:37.929 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e1b44f76-91da-4986-a06d-4f047931e7eb, encrypted: true, database_read_version: 1, database_write_version: 1 }
18811 Sep 22 23:21:37.929 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c WaitActive WaitActive WaitActive
18812 Sep 22 23:21:37.929 INFO Crucible Version: Crucible Version: 0.0.1
18813 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18814 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18815 rustc: 1.70.0 stable x86_64-unknown-illumos
18816 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18817 Sep 22 23:21:37.929 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18818 Sep 22 23:21:37.929 INFO Using address: 127.0.0.1:58081, task: main
18819 Sep 22 23:21:37.929 INFO [1] downstairs client at 127.0.0.1:59981 has UUID fb1574ce-cb6c-41db-a1fc-e45b9d42517f
18820 Sep 22 23:21:37.930 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fb1574ce-cb6c-41db-a1fc-e45b9d42517f, encrypted: true, database_read_version: 1, database_write_version: 1 }
18821 Sep 22 23:21:37.930 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c WaitActive WaitActive WaitActive
18822 Sep 22 23:21:37.930 INFO [2] downstairs client at 127.0.0.1:40994 has UUID ab64c062-c03c-4bae-8346-acfa3ee4c8e7
18823 Sep 22 23:21:37.930 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ab64c062-c03c-4bae-8346-acfa3ee4c8e7, encrypted: true, database_read_version: 1, database_write_version: 1 }
18824 Sep 22 23:21:37.930 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c WaitActive WaitActive WaitActive
18825 Sep 22 23:21:37.930 INFO Repair listens on 127.0.0.1:0, task: repair
18826 Sep 22 23:21:37.930 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61379, task: repair
18827 Sep 22 23:21:37.930 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61379, task: repair
18828 Sep 22 23:21:37.930 INFO Current flush_numbers [0..12]: [0, 0]
18829 Sep 22 23:21:37.930 INFO listening, local_addr: 127.0.0.1:61379, task: repair
18830 Sep 22 23:21:37.930 INFO Downstairs has completed Negotiation, task: proc
18831 Sep 22 23:21:37.930 INFO Current flush_numbers [0..12]: [0, 0]
18832 Sep 22 23:21:37.930 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61379, task: repair
18833 Sep 22 23:21:37.930 INFO Using repair address: 127.0.0.1:61379, task: main
18834 Sep 22 23:21:37.930 INFO No SSL acceptor configured, task: main
18835 Sep 22 23:21:37.931 INFO Downstairs has completed Negotiation, task: proc
18836 Sep 22 23:21:37.931 INFO current number of open files limit 65536 is already the maximum
18837 Sep 22 23:21:37.931 INFO Current flush_numbers [0..12]: [0, 0]
18838 Sep 22 23:21:37.931 INFO Created new region file "/tmp/downstairs-XLPx1jrJ/region.json"
18839 Sep 22 23:21:37.931 INFO Downstairs has completed Negotiation, task: proc
18840 Sep 22 23:21:37.931 INFO [0] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c (9a97b4b8-4148-45e4-bbb1-302708e60500) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
18841 Sep 22 23:21:37.931 INFO [0] Transition from WaitActive to WaitQuorum
18842 Sep 22 23:21:37.931 WARN [0] new RM replaced this: None
18843 Sep 22 23:21:37.931 INFO [0] Starts reconcile loop
18844 Sep 22 23:21:37.931 INFO [1] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c (9a97b4b8-4148-45e4-bbb1-302708e60500) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
18845 Sep 22 23:21:37.931 INFO [1] Transition from WaitActive to WaitQuorum
18846 Sep 22 23:21:37.931 WARN [1] new RM replaced this: None
18847 Sep 22 23:21:37.931 INFO [1] Starts reconcile loop
18848 Sep 22 23:21:37.931 INFO [2] 517f6bf3-86c2-4511-bc4e-8840c2dfa85c (9a97b4b8-4148-45e4-bbb1-302708e60500) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18849 Sep 22 23:21:37.931 INFO [2] Transition from WaitActive to WaitQuorum
18850 Sep 22 23:21:37.931 WARN [2] new RM replaced this: None
18851 Sep 22 23:21:37.931 INFO [2] Starts reconcile loop
18852 Sep 22 23:21:37.932 INFO [0] 127.0.0.1:51174 task reports connection:true
18853 Sep 22 23:21:37.932 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c WaitQuorum WaitQuorum WaitQuorum
18854 Sep 22 23:21:37.932 INFO [0]R flush_numbers: [0, 0]
18855 Sep 22 23:21:37.932 INFO [0]R generation: [0, 0]
18856 Sep 22 23:21:37.932 INFO [0]R dirty: [false, false]
18857 Sep 22 23:21:37.932 INFO [1]R flush_numbers: [0, 0]
18858 Sep 22 23:21:37.932 INFO [1]R generation: [0, 0]
18859 Sep 22 23:21:37.932 INFO [1]R dirty: [false, false]
18860 Sep 22 23:21:37.932 INFO [2]R flush_numbers: [0, 0]
18861 Sep 22 23:21:37.932 INFO [2]R generation: [0, 0]
18862 Sep 22 23:21:37.932 INFO [2]R dirty: [false, false]
18863 Sep 22 23:21:37.932 INFO Max found gen is 1
18864 Sep 22 23:21:37.932 INFO Generation requested: 3 >= found:1
18865 Sep 22 23:21:37.932 INFO Next flush: 1
18866 Sep 22 23:21:37.932 INFO All extents match
18867 Sep 22 23:21:37.932 INFO No downstairs repair required
18868 Sep 22 23:21:37.932 INFO No initial repair work was required
18869 Sep 22 23:21:37.932 INFO Set Downstairs and Upstairs active
18870 Sep 22 23:21:37.932 INFO current number of open files limit 65536 is already the maximum
18871 Sep 22 23:21:37.932 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c is now active with session: 9a97b4b8-4148-45e4-bbb1-302708e60500
18872 Sep 22 23:21:37.932 INFO Opened existing region file "/tmp/downstairs-TcKuMYwD/region.json"
18873 Sep 22 23:21:37.932 INFO Database read version 1
18874 Sep 22 23:21:37.932 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c Set Active after no repair
18875 Sep 22 23:21:37.932 INFO Database write version 1
18876 Sep 22 23:21:37.932 INFO Notify all downstairs, region set compare is done.
18877 Sep 22 23:21:37.932 INFO Set check for repair
18878 Sep 22 23:21:37.932 INFO [1] 127.0.0.1:59981 task reports connection:true
18879 Sep 22 23:21:37.932 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c Active Active Active
18880 Sep 22 23:21:37.932 INFO Set check for repair
18881 Sep 22 23:21:37.932 INFO [2] 127.0.0.1:40994 task reports connection:true
18882 Sep 22 23:21:37.932 INFO 517f6bf3-86c2-4511-bc4e-8840c2dfa85c Active Active Active
18883 Sep 22 23:21:37.932 INFO Set check for repair
18884 Sep 22 23:21:37.932 INFO [0] received reconcile message
18885 Sep 22 23:21:37.932 INFO [0] All repairs completed, exit
18886 Sep 22 23:21:37.932 INFO [0] Starts cmd_loop
18887 Sep 22 23:21:37.932 INFO [1] received reconcile message
18888 Sep 22 23:21:37.932 INFO [1] All repairs completed, exit
18889 Sep 22 23:21:37.932 INFO [1] Starts cmd_loop
18890 Sep 22 23:21:37.932 INFO [2] received reconcile message
18891 Sep 22 23:21:37.932 INFO [2] All repairs completed, exit
18892 Sep 22 23:21:37.932 INFO [2] Starts cmd_loop
18893 The guest has finished waiting for activation
18894 The guest has requested activation
18895 Sep 22 23:21:37.933 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 active request set
18896 Sep 22 23:21:37.933 INFO [0] received activate with gen 3
18897 Sep 22 23:21:37.933 INFO [0] client got ds_active_rx, promote! session 2e580a9f-7e54-44b6-83db-09df314d2c01
18898 Sep 22 23:21:37.933 INFO [1] received activate with gen 3
18899 Sep 22 23:21:37.933 INFO [1] client got ds_active_rx, promote! session 2e580a9f-7e54-44b6-83db-09df314d2c01
18900 Sep 22 23:21:37.933 INFO [2] received activate with gen 3
18901 Sep 22 23:21:37.933 INFO [2] client got ds_active_rx, promote! session 2e580a9f-7e54-44b6-83db-09df314d2c01
18902 Sep 22 23:21:37.933 INFO Signaling to UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 } thread that UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } is being promoted (read-only)
18903 Sep 22 23:21:37.933 INFO Signaling to UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 } thread that UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } is being promoted (read-only)
18904 test test::integration_test_two_layers_write_unwritten ... ok
18905 Sep 22 23:21:37.933 INFO Signaling to UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 } thread that UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } is being promoted (read-only)
18906 test test::integration_test_upstairs_read_only_rejects_write ... ok
18907 Sep 22 23:21:37.933 WARN Another upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 }, task: main
18908 Sep 22 23:21:37.933 INFO connection (127.0.0.1:50540): all done
18909 Sep 22 23:21:37.933 WARN Another upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 }, task: main
18910 Sep 22 23:21:37.933 INFO connection (127.0.0.1:51558): all done
18911 Sep 22 23:21:37.933 WARN Another upstairs UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 2e580a9f-7e54-44b6-83db-09df314d2c01, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 4b7c3632-4da0-4e8d-94a7-3f194685b4d5, session_id: 8442ae1d-103c-45db-9a72-ab1fbfb096dd, gen: 2 }, task: main
18912 Sep 22 23:21:37.933 INFO connection (127.0.0.1:34528): all done
18913 Sep 22 23:21:37.933 INFO current number of open files limit 65536 is already the maximum
18914 Sep 22 23:21:37.934 INFO current number of open files limit 65536 is already the maximum
18915 Sep 22 23:21:37.934 INFO Created new region file "/tmp/downstairs-0A3zeEIk/region.json"
18916 Sep 22 23:21:37.934 INFO Created new region file "/tmp/downstairs-UUom91KY/region.json"
18917 Sep 22 23:21:37.934 ERRO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) cmd_loop saw YouAreNoLongerActive 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 2e580a9f-7e54-44b6-83db-09df314d2c01 3
18918 Sep 22 23:21:37.934 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) Active Active Active ds_transition to Disabled
18919 Sep 22 23:21:37.934 INFO [1] Transition from Active to Disabled
18920 Sep 22 23:21:37.934 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 set inactive, session 8442ae1d-103c-45db-9a72-ab1fbfb096dd
18921 Sep 22 23:21:37.934 ERRO 127.0.0.1:54505: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2, looper: 1
18922 Sep 22 23:21:37.934 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Gone missing, transition from Disabled to Disconnected
18923 Sep 22 23:21:37.934 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 connection to 127.0.0.1:54505 closed, looper: 1
18924 Sep 22 23:21:37.934 ERRO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) cmd_loop saw YouAreNoLongerActive 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 2e580a9f-7e54-44b6-83db-09df314d2c01 3
18925 Sep 22 23:21:37.934 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) Active Disconnected Active ds_transition to Disabled
18926 Sep 22 23:21:37.934 INFO [2] Transition from Active to Disabled
18927 Sep 22 23:21:37.934 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 set inactive, session 8442ae1d-103c-45db-9a72-ab1fbfb096dd
18928 Sep 22 23:21:37.934 INFO UUID: 62354bbd-e3bc-4b5d-ab02-957b1e479be5
18929 Sep 22 23:21:37.934 ERRO 127.0.0.1:38410: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2, looper: 2
18930 Sep 22 23:21:37.934 INFO Blocks per extent:5 Total Extents: 2
18931 Sep 22 23:21:37.934 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Gone missing, transition from Disabled to Disconnected
18932 Sep 22 23:21:37.934 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 connection to 127.0.0.1:38410 closed, looper: 2
18933 Sep 22 23:21:37.934 INFO Crucible Version: Crucible Version: 0.0.1
18934 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18935 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18936 rustc: 1.70.0 stable x86_64-unknown-illumos
18937 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18938 Sep 22 23:21:37.934 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18939 Sep 22 23:21:37.934 ERRO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) cmd_loop saw YouAreNoLongerActive 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 2e580a9f-7e54-44b6-83db-09df314d2c01 3
18940 Sep 22 23:21:37.934 INFO Using address: 127.0.0.1:37503, task: main
18941 Sep 22 23:21:37.934 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (8442ae1d-103c-45db-9a72-ab1fbfb096dd) Active Disconnected Disconnected ds_transition to Disabled
18942 Sep 22 23:21:37.934 INFO [0] Transition from Active to Disabled
18943 Sep 22 23:21:37.934 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 set inactive, session 8442ae1d-103c-45db-9a72-ab1fbfb096dd
18944 Sep 22 23:21:37.935 ERRO 127.0.0.1:48404: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2, looper: 0
18945 Sep 22 23:21:37.935 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Gone missing, transition from Disabled to Disconnected
18946 Sep 22 23:21:37.935 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 connection to 127.0.0.1:48404 closed, looper: 0
18947 Sep 22 23:21:37.935 INFO Repair listens on 127.0.0.1:0, task: repair
18948 Sep 22 23:21:37.935 WARN [1] pm_task rx.recv() is None
18949 Sep 22 23:21:37.935 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60153, task: repair
18950 Sep 22 23:21:37.935 INFO [1] 127.0.0.1:54505 task reports connection:false
18951 Sep 22 23:21:37.935 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60153, task: repair
18952 Sep 22 23:21:37.935 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Disconnected Disconnected Disconnected
18953 Sep 22 23:21:37.935 INFO [1] 127.0.0.1:54505 task reports offline
18954 Sep 22 23:21:37.935 INFO listening, local_addr: 127.0.0.1:60153, task: repair
18955 Sep 22 23:21:37.935 INFO [2] 127.0.0.1:38410 task reports connection:false
18956 Sep 22 23:21:37.935 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Disconnected Disconnected Disconnected
18957 Sep 22 23:21:37.935 INFO [2] 127.0.0.1:38410 task reports offline
18958 Sep 22 23:21:37.935 INFO [0] 127.0.0.1:48404 task reports connection:false
18959 Sep 22 23:21:37.935 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Disconnected Disconnected Disconnected
18960 Sep 22 23:21:37.935 INFO [0] 127.0.0.1:48404 task reports offline
18961 Sep 22 23:21:37.935 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60153, task: repair
18962 Sep 22 23:21:37.935 WARN [2] pm_task rx.recv() is None
18963 Sep 22 23:21:37.935 WARN [0] pm_task rx.recv() is None
18964 Sep 22 23:21:37.935 INFO Using repair address: 127.0.0.1:60153, task: main
18965 Sep 22 23:21:37.935 INFO No SSL acceptor configured, task: main
18966 Sep 22 23:21:37.935 INFO current number of open files limit 65536 is already the maximum
18967 Sep 22 23:21:37.935 INFO Opened existing region file "/tmp/downstairs-XLPx1jrJ/region.json"
18968 Sep 22 23:21:37.935 INFO Database read version 1
18969 Sep 22 23:21:37.935 INFO Database write version 1
18970 Sep 22 23:21:37.935 INFO [0] downstairs client at 127.0.0.1:48404 has UUID f5ea863d-974b-4712-8f07-7666c185979b
18971 Sep 22 23:21:37.935 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f5ea863d-974b-4712-8f07-7666c185979b, encrypted: true, database_read_version: 1, database_write_version: 1 }
18972 Sep 22 23:21:37.935 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
18973 Sep 22 23:21:37.936 INFO [1] downstairs client at 127.0.0.1:54505 has UUID a1447c8e-cf80-4bea-b730-010907ef0c7f
18974 Sep 22 23:21:37.936 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a1447c8e-cf80-4bea-b730-010907ef0c7f, encrypted: true, database_read_version: 1, database_write_version: 1 }
18975 Sep 22 23:21:37.936 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
18976 Sep 22 23:21:37.936 INFO [2] downstairs client at 127.0.0.1:38410 has UUID 1b0c673f-012e-4936-8109-087022151153
18977 Sep 22 23:21:37.936 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1b0c673f-012e-4936-8109-087022151153, encrypted: true, database_read_version: 1, database_write_version: 1 }
18978 Sep 22 23:21:37.936 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitActive WaitActive WaitActive
18979 Sep 22 23:21:37.936 INFO Upstairs starts
18980 Sep 22 23:21:37.936 INFO Current flush_numbers [0..12]: [1, 1]
18981 Sep 22 23:21:37.936 INFO Crucible Version: BuildInfo {
18982 version: "0.0.1",
18983 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18984 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18985 git_branch: "main",
18986 rustc_semver: "1.70.0",
18987 rustc_channel: "stable",
18988 rustc_host_triple: "x86_64-unknown-illumos",
18989 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18990 cargo_triple: "x86_64-unknown-illumos",
18991 debug: true,
18992 opt_level: 0,
18993 }
18994 Sep 22 23:21:37.936 INFO Upstairs <-> Downstairs Message Version: 4
18995 Sep 22 23:21:37.936 INFO Crucible stats registered with UUID: 8db920ae-ce39-4f1d-b015-8a9c440a0a0a
18996 Sep 22 23:21:37.936 INFO Crucible 8db920ae-ce39-4f1d-b015-8a9c440a0a0a has session id: e74aff3f-2e11-4449-adea-83055a3ce4f5
18997 Sep 22 23:21:37.936 INFO Downstairs has completed Negotiation, task: proc
18998 Sep 22 23:21:37.936 INFO listening on 127.0.0.1:0, task: main
18999 Sep 22 23:21:37.936 INFO listening on 127.0.0.1:0, task: main
19000 Sep 22 23:21:37.936 INFO listening on 127.0.0.1:0, task: main
19001 Sep 22 23:21:37.936 INFO [0] connecting to 127.0.0.1:60187, looper: 0
19002 Sep 22 23:21:37.936 INFO [1] connecting to 127.0.0.1:64895, looper: 1
19003 Sep 22 23:21:37.937 INFO [2] connecting to 127.0.0.1:37503, looper: 2
19004 Sep 22 23:21:37.937 INFO up_listen starts, task: up_listen
19005 Sep 22 23:21:37.937 INFO Wait for all three downstairs to come online
19006 Sep 22 23:21:37.937 INFO Flush timeout: 0.5
19007 Sep 22 23:21:37.937 INFO accepted connection from 127.0.0.1:59290, task: main
19008 Sep 22 23:21:37.937 INFO Current flush_numbers [0..12]: [1, 1]
19009 Sep 22 23:21:37.937 INFO accepted connection from 127.0.0.1:50925, task: main
19010 Sep 22 23:21:37.937 INFO accepted connection from 127.0.0.1:64950, task: main
19011 Sep 22 23:21:37.937 INFO [1] 8db920ae-ce39-4f1d-b015-8a9c440a0a0a looper connected, looper: 1
19012 Sep 22 23:21:37.937 INFO Downstairs has completed Negotiation, task: proc
19013 Sep 22 23:21:37.937 INFO [1] Proc runs for 127.0.0.1:64895 in state New
19014 Sep 22 23:21:37.937 INFO [0] 8db920ae-ce39-4f1d-b015-8a9c440a0a0a looper connected, looper: 0
19015 Sep 22 23:21:37.937 INFO [0] Proc runs for 127.0.0.1:60187 in state New
19016 Sep 22 23:21:37.937 INFO Current flush_numbers [0..12]: [1, 1]
19017 Sep 22 23:21:37.937 INFO [2] 8db920ae-ce39-4f1d-b015-8a9c440a0a0a looper connected, looper: 2
19018 Sep 22 23:21:37.937 INFO [2] Proc runs for 127.0.0.1:37503 in state New
19019 Sep 22 23:21:37.937 INFO Downstairs has completed Negotiation, task: proc
19020 Sep 22 23:21:37.938 INFO [0] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (2e580a9f-7e54-44b6-83db-09df314d2c01) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19021 Sep 22 23:21:37.938 INFO [0] Transition from WaitActive to WaitQuorum
19022 Sep 22 23:21:37.938 WARN [0] new RM replaced this: None
19023 Sep 22 23:21:37.938 INFO [0] Starts reconcile loop
19024 Sep 22 23:21:37.938 INFO [1] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (2e580a9f-7e54-44b6-83db-09df314d2c01) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19025 Sep 22 23:21:37.938 INFO [1] Transition from WaitActive to WaitQuorum
19026 Sep 22 23:21:37.938 WARN [1] new RM replaced this: None
19027 Sep 22 23:21:37.938 INFO current number of open files limit 65536 is already the maximum
19028 Sep 22 23:21:37.938 INFO [1] Starts reconcile loop
19029 Sep 22 23:21:37.938 INFO [2] 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 (2e580a9f-7e54-44b6-83db-09df314d2c01) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19030 Sep 22 23:21:37.938 INFO Created new region file "/tmp/downstairs-XDMkkT95/region.json"
19031 Sep 22 23:21:37.938 INFO [2] Transition from WaitActive to WaitQuorum
19032 Sep 22 23:21:37.938 WARN [2] new RM replaced this: None
19033 Sep 22 23:21:37.938 INFO [2] Starts reconcile loop
19034 Sep 22 23:21:37.938 INFO [0] 127.0.0.1:48404 task reports connection:true
19035 Sep 22 23:21:37.938 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 WaitQuorum WaitQuorum WaitQuorum
19036 Sep 22 23:21:37.938 INFO UUID: 68909a60-f6e3-4357-be0b-275dba4d89d7
19037 Sep 22 23:21:37.938 INFO [0]R flush_numbers: [1, 1]
19038 Sep 22 23:21:37.938 INFO [0]R generation: [1, 1]
19039 Sep 22 23:21:37.938 INFO Blocks per extent:5 Total Extents: 2
19040 Sep 22 23:21:37.938 INFO [0]R dirty: [false, false]
19041 Sep 22 23:21:37.938 INFO [1]R flush_numbers: [1, 1]
19042 Sep 22 23:21:37.938 INFO [1]R generation: [1, 1]
19043 Sep 22 23:21:37.938 INFO [1]R dirty: [false, false]
19044 Sep 22 23:21:37.938 INFO [2]R flush_numbers: [1, 1]
19045 Sep 22 23:21:37.938 INFO Crucible Version: Crucible Version: 0.0.1
19046 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19047 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19048 rustc: 1.70.0 stable x86_64-unknown-illumos
19049 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19050 Sep 22 23:21:37.938 INFO [2]R generation: [1, 1]
19051 Sep 22 23:21:37.938 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19052 Sep 22 23:21:37.938 INFO [2]R dirty: [false, false]
19053 Sep 22 23:21:37.938 INFO Using address: 127.0.0.1:39334, task: main
19054 Sep 22 23:21:37.938 INFO Max found gen is 2
19055 Sep 22 23:21:37.938 INFO Generation requested: 3 >= found:2
19056 Sep 22 23:21:37.938 INFO current number of open files limit 65536 is already the maximum
19057 Sep 22 23:21:37.938 INFO Next flush: 2
19058 Sep 22 23:21:37.938 INFO Opened existing region file "/tmp/downstairs-0A3zeEIk/region.json"
19059 Sep 22 23:21:37.938 INFO All extents match
19060 Sep 22 23:21:37.938 INFO Database read version 1
19061 Sep 22 23:21:37.938 INFO Database write version 1
19062 Sep 22 23:21:37.938 INFO No downstairs repair required
19063 Sep 22 23:21:37.938 INFO No initial repair work was required
19064 Sep 22 23:21:37.938 INFO Set Downstairs and Upstairs active
19065 Sep 22 23:21:37.938 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 is now active with session: 2e580a9f-7e54-44b6-83db-09df314d2c01
19066 Sep 22 23:21:37.938 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Set Active after no repair
19067 Sep 22 23:21:37.938 INFO Notify all downstairs, region set compare is done.
19068 Sep 22 23:21:37.938 INFO current number of open files limit 65536 is already the maximum
19069 Sep 22 23:21:37.938 INFO Opened existing region file "/tmp/downstairs-UUom91KY/region.json"
19070 Sep 22 23:21:37.938 INFO Set check for repair
19071 Sep 22 23:21:37.938 INFO Database read version 1
19072 Sep 22 23:21:37.938 INFO Database write version 1
19073 Sep 22 23:21:37.938 INFO [1] 127.0.0.1:54505 task reports connection:true
19074 Sep 22 23:21:37.939 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Active Active Active
19075 Sep 22 23:21:37.939 INFO Set check for repair
19076 Sep 22 23:21:37.939 INFO Repair listens on 127.0.0.1:0, task: repair
19077 Sep 22 23:21:37.939 INFO [2] 127.0.0.1:38410 task reports connection:true
19078 Sep 22 23:21:37.939 INFO 4b7c3632-4da0-4e8d-94a7-3f194685b4d5 Active Active Active
19079 Sep 22 23:21:37.939 INFO Set check for repair
19080 Sep 22 23:21:37.939 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61766, task: repair
19081 Sep 22 23:21:37.939 INFO [0] received reconcile message
19082 Sep 22 23:21:37.939 INFO [0] All repairs completed, exit
19083 Sep 22 23:21:37.939 INFO [0] Starts cmd_loop
19084 Sep 22 23:21:37.939 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61766, task: repair
19085 Sep 22 23:21:37.939 INFO [1] received reconcile message
19086 Sep 22 23:21:37.939 INFO listening, local_addr: 127.0.0.1:61766, task: repair
19087 Sep 22 23:21:37.939 INFO [1] All repairs completed, exit
19088 Sep 22 23:21:37.939 INFO [1] Starts cmd_loop
19089 Sep 22 23:21:37.939 INFO [2] received reconcile message
19090 Sep 22 23:21:37.939 INFO [2] All repairs completed, exit
19091 Sep 22 23:21:37.939 INFO [2] Starts cmd_loop
19092 The guest has finished waiting for activation
19093 Sep 22 23:21:37.939 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61766, task: repair
19094 Sep 22 23:21:37.939 INFO Using repair address: 127.0.0.1:61766, task: main
19095 Sep 22 23:21:37.939 INFO No SSL acceptor configured, task: main
19096 Sep 22 23:21:37.939 DEBG IO Read 1000 has deps []
19097 Sep 22 23:21:37.940 INFO Upstairs starts
19098 Sep 22 23:21:37.940 INFO Crucible Version: BuildInfo {
19099 version: "0.0.1",
19100 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19101 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19102 git_branch: "main",
19103 rustc_semver: "1.70.0",
19104 rustc_channel: "stable",
19105 rustc_host_triple: "x86_64-unknown-illumos",
19106 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19107 cargo_triple: "x86_64-unknown-illumos",
19108 debug: true,
19109 opt_level: 0,
19110 }
19111 Sep 22 23:21:37.940 INFO Upstairs <-> Downstairs Message Version: 4
19112 Sep 22 23:21:37.940 INFO Crucible stats registered with UUID: c34debde-f046-44c6-9183-91356e8062e6
19113 Sep 22 23:21:37.940 INFO Crucible c34debde-f046-44c6-9183-91356e8062e6 has session id: 25d0d638-ed65-46be-a21d-837b71bb6b7b
19114 Sep 22 23:21:37.940 INFO listening on 127.0.0.1:0, task: main
19115 Sep 22 23:21:37.940 DEBG Read :1000 deps:[] res:true
19116 Sep 22 23:21:37.940 INFO listening on 127.0.0.1:0, task: main
19117 Sep 22 23:21:37.940 INFO listening on 127.0.0.1:0, task: main
19118 Sep 22 23:21:37.940 INFO [0] connecting to 127.0.0.1:41387, looper: 0
19119 Sep 22 23:21:37.940 INFO [1] connecting to 127.0.0.1:58081, looper: 1
19120 Sep 22 23:21:37.940 DEBG Read :1000 deps:[] res:true
19121 Sep 22 23:21:37.940 INFO [2] connecting to 127.0.0.1:39334, looper: 2
19122 Sep 22 23:21:37.940 INFO up_listen starts, task: up_listen
19123 Sep 22 23:21:37.940 INFO Wait for all three downstairs to come online
19124 Sep 22 23:21:37.940 INFO Flush timeout: 0.5
19125 Sep 22 23:21:37.941 INFO accepted connection from 127.0.0.1:33706, task: main
19126 Sep 22 23:21:37.941 INFO accepted connection from 127.0.0.1:48642, task: main
19127 Sep 22 23:21:37.941 INFO accepted connection from 127.0.0.1:44693, task: main
19128 Sep 22 23:21:37.941 INFO UUID: c0e11d40-fa1e-47cb-9523-4c205fda14a9
19129 Sep 22 23:21:37.941 INFO Blocks per extent:5 Total Extents: 2
19130 Sep 22 23:21:37.941 INFO [0] c34debde-f046-44c6-9183-91356e8062e6 looper connected, looper: 0
19131 Sep 22 23:21:37.941 DEBG Read :1000 deps:[] res:true
19132 Sep 22 23:21:37.941 INFO [0] Proc runs for 127.0.0.1:41387 in state New
19133 Sep 22 23:21:37.941 INFO Crucible Version: Crucible Version: 0.0.1
19134 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19135 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19136 rustc: 1.70.0 stable x86_64-unknown-illumos
19137 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19138 Sep 22 23:21:37.941 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19139 Sep 22 23:21:37.941 INFO Using address: 127.0.0.1:57089, task: main
19140 Sep 22 23:21:37.941 INFO [1] c34debde-f046-44c6-9183-91356e8062e6 looper connected, looper: 1
19141 Sep 22 23:21:37.941 INFO [1] Proc runs for 127.0.0.1:58081 in state New
19142 Sep 22 23:21:37.941 INFO UUID: da052c76-0745-44df-96e3-d0a8c019994b
19143 Sep 22 23:21:37.941 INFO Blocks per extent:5 Total Extents: 2
19144 Sep 22 23:21:37.941 INFO [2] c34debde-f046-44c6-9183-91356e8062e6 looper connected, looper: 2
19145 Sep 22 23:21:37.941 INFO Crucible Version: Crucible Version: 0.0.1
19146 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19147 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19148 rustc: 1.70.0 stable x86_64-unknown-illumos
19149 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19150 Sep 22 23:21:37.941 INFO [2] Proc runs for 127.0.0.1:39334 in state New
19151 Sep 22 23:21:37.941 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19152 Sep 22 23:21:37.941 INFO Using address: 127.0.0.1:38958, task: main
19153 Sep 22 23:21:37.942 INFO Repair listens on 127.0.0.1:0, task: repair
19154 Sep 22 23:21:37.942 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63200, task: repair
19155 Sep 22 23:21:37.942 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63200, task: repair
19156 Sep 22 23:21:37.942 INFO Connection request from c34debde-f046-44c6-9183-91356e8062e6 with version 4, task: proc
19157 Sep 22 23:21:37.942 INFO listening, local_addr: 127.0.0.1:63200, task: repair
19158 Sep 22 23:21:37.942 INFO upstairs UpstairsConnection { upstairs_id: c34debde-f046-44c6-9183-91356e8062e6, session_id: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99, gen: 1 } connected, version 4, task: proc
19159 Sep 22 23:21:37.942 INFO Repair listens on 127.0.0.1:0, task: repair
19160 Sep 22 23:21:37.942 INFO Connection request from c34debde-f046-44c6-9183-91356e8062e6 with version 4, task: proc
19161 Sep 22 23:21:37.942 INFO upstairs UpstairsConnection { upstairs_id: c34debde-f046-44c6-9183-91356e8062e6, session_id: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99, gen: 1 } connected, version 4, task: proc
19162 Sep 22 23:21:37.942 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63200, task: repair
19163 Sep 22 23:21:37.942 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61420, task: repair
19164 Sep 22 23:21:37.942 DEBG [0] Read AckReady 1000, : downstairs
19165 Sep 22 23:21:37.942 INFO Using repair address: 127.0.0.1:63200, task: main
19166 Sep 22 23:21:37.942 INFO No SSL acceptor configured, task: main
19167 Sep 22 23:21:37.942 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61420, task: repair
19168 Sep 22 23:21:37.942 INFO Connection request from c34debde-f046-44c6-9183-91356e8062e6 with version 4, task: proc
19169 Sep 22 23:21:37.942 INFO upstairs UpstairsConnection { upstairs_id: c34debde-f046-44c6-9183-91356e8062e6, session_id: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99, gen: 1 } connected, version 4, task: proc
19170 Sep 22 23:21:37.942 INFO listening, local_addr: 127.0.0.1:61420, task: repair
19171 Sep 22 23:21:37.942 DEBG [1] Read already AckReady 1000, : downstairs
19172 The guest has requested activation
19173 Sep 22 23:21:37.942 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61420, task: repair
19174 Sep 22 23:21:37.942 INFO current number of open files limit 65536 is already the maximum
19175 Sep 22 23:21:37.942 DEBG [2] Read already AckReady 1000, : downstairs
19176 Sep 22 23:21:37.942 INFO Using repair address: 127.0.0.1:61420, task: main
19177 Sep 22 23:21:37.942 INFO No SSL acceptor configured, task: main
19178 Sep 22 23:21:37.942 DEBG up_ds_listen was notified
19179 Sep 22 23:21:37.942 INFO [0] c34debde-f046-44c6-9183-91356e8062e6 (1f51d95c-7aaf-4901-936b-d4d4f0f07c99) New New New ds_transition to WaitActive
19180 Sep 22 23:21:37.942 DEBG up_ds_listen process 1000
19181 Sep 22 23:21:37.942 INFO [0] Transition from New to WaitActive
19182 Sep 22 23:21:37.942 DEBG [A] ack job 1000:1, : downstairs
19183 Sep 22 23:21:37.942 INFO Created new region file "/tmp/downstairs-YjjeJN3C/region.json"
19184 Sep 22 23:21:37.942 INFO [1] c34debde-f046-44c6-9183-91356e8062e6 (1f51d95c-7aaf-4901-936b-d4d4f0f07c99) WaitActive New New ds_transition to WaitActive
19185 Sep 22 23:21:37.942 INFO [1] Transition from New to WaitActive
19186 Sep 22 23:21:37.942 INFO current number of open files limit 65536 is already the maximum
19187 Sep 22 23:21:37.942 INFO [2] c34debde-f046-44c6-9183-91356e8062e6 (1f51d95c-7aaf-4901-936b-d4d4f0f07c99) WaitActive WaitActive New ds_transition to WaitActive
19188 Sep 22 23:21:37.942 INFO [2] Transition from New to WaitActive
19189 Sep 22 23:21:37.943 INFO current number of open files limit 65536 is already the maximum
19190 Sep 22 23:21:37.943 INFO Opened existing region file "/tmp/downstairs-XDMkkT95/region.json"
19191 Sep 22 23:21:37.943 INFO Database read version 1
19192 Sep 22 23:21:37.943 INFO Database write version 1
19193 Sep 22 23:21:37.943 INFO c34debde-f046-44c6-9183-91356e8062e6 active request set
19194 Sep 22 23:21:37.943 INFO Created new region file "/tmp/downstairs-m476cQbB/region.json"
19195 Sep 22 23:21:37.943 DEBG up_ds_listen checked 1 jobs, back to waiting
19196 Sep 22 23:21:37.943 INFO [0] received activate with gen 1
19197 Sep 22 23:21:37.943 INFO [0] client got ds_active_rx, promote! session 1f51d95c-7aaf-4901-936b-d4d4f0f07c99
19198 Sep 22 23:21:37.943 INFO [1] received activate with gen 1
19199 Sep 22 23:21:37.943 INFO [1] client got ds_active_rx, promote! session 1f51d95c-7aaf-4901-936b-d4d4f0f07c99
19200 Sep 22 23:21:37.943 DEBG IO Read 1000 has deps []
19201 Sep 22 23:21:37.943 INFO [2] received activate with gen 1
19202 Sep 22 23:21:37.943 INFO [2] client got ds_active_rx, promote! session 1f51d95c-7aaf-4901-936b-d4d4f0f07c99
19203 Sep 22 23:21:37.943 INFO UpstairsConnection { upstairs_id: c34debde-f046-44c6-9183-91356e8062e6, session_id: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99, gen: 1 } is now active (read-write)
19204 Sep 22 23:21:37.943 INFO UpstairsConnection { upstairs_id: c34debde-f046-44c6-9183-91356e8062e6, session_id: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99, gen: 1 } is now active (read-write)
19205 Sep 22 23:21:37.943 INFO UpstairsConnection { upstairs_id: c34debde-f046-44c6-9183-91356e8062e6, session_id: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99, gen: 1 } is now active (read-write)
19206 Sep 22 23:21:37.943 DEBG Read :1000 deps:[] res:true
19207 Sep 22 23:21:37.944 INFO [0] downstairs client at 127.0.0.1:41387 has UUID b708efc7-8172-4645-a89a-1dd5a6e86ff4
19208 Sep 22 23:21:37.944 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b708efc7-8172-4645-a89a-1dd5a6e86ff4, encrypted: true, database_read_version: 1, database_write_version: 1 }
19209 Sep 22 23:21:37.944 DEBG Read :1000 deps:[] res:true
19210 Sep 22 23:21:37.944 INFO c34debde-f046-44c6-9183-91356e8062e6 WaitActive WaitActive WaitActive
19211 Sep 22 23:21:37.944 INFO [1] downstairs client at 127.0.0.1:58081 has UUID 2dedfda5-97c1-427a-8b74-5c08fb2c233b
19212 Sep 22 23:21:37.944 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2dedfda5-97c1-427a-8b74-5c08fb2c233b, encrypted: true, database_read_version: 1, database_write_version: 1 }
19213 Sep 22 23:21:37.944 INFO c34debde-f046-44c6-9183-91356e8062e6 WaitActive WaitActive WaitActive
19214 Sep 22 23:21:37.944 INFO [2] downstairs client at 127.0.0.1:39334 has UUID 68909a60-f6e3-4357-be0b-275dba4d89d7
19215 Sep 22 23:21:37.944 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 68909a60-f6e3-4357-be0b-275dba4d89d7, encrypted: true, database_read_version: 1, database_write_version: 1 }
19216 Sep 22 23:21:37.944 DEBG Read :1000 deps:[] res:true
19217 Sep 22 23:21:37.944 INFO c34debde-f046-44c6-9183-91356e8062e6 WaitActive WaitActive WaitActive
19218 Sep 22 23:21:37.944 INFO Current flush_numbers [0..12]: [0, 0]
19219 Sep 22 23:21:37.945 INFO Downstairs has completed Negotiation, task: proc
19220 Sep 22 23:21:37.945 INFO Current flush_numbers [0..12]: [0, 0]
19221 Sep 22 23:21:37.945 INFO Downstairs has completed Negotiation, task: proc
19222 Sep 22 23:21:37.946 INFO Current flush_numbers [0..12]: [0, 0]
19223 Sep 22 23:21:37.946 INFO UUID: c2905fd3-00af-4a7b-b34a-8c57c12a0633
19224 Sep 22 23:21:37.946 INFO Blocks per extent:5 Total Extents: 2
19225 Sep 22 23:21:37.946 INFO Downstairs has completed Negotiation, task: proc
19226 Sep 22 23:21:37.946 INFO Crucible Version: Crucible Version: 0.0.1
19227 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19228 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19229 rustc: 1.70.0 stable x86_64-unknown-illumos
19230 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19231 Sep 22 23:21:37.946 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19232 Sep 22 23:21:37.946 INFO Using address: 127.0.0.1:42374, task: main
19233 Sep 22 23:21:37.946 INFO [0] c34debde-f046-44c6-9183-91356e8062e6 (1f51d95c-7aaf-4901-936b-d4d4f0f07c99) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19234 Sep 22 23:21:37.946 INFO [0] Transition from WaitActive to WaitQuorum
19235 Sep 22 23:21:37.946 WARN [0] new RM replaced this: None
19236 Sep 22 23:21:37.946 INFO [0] Starts reconcile loop
19237 Sep 22 23:21:37.946 INFO [1] c34debde-f046-44c6-9183-91356e8062e6 (1f51d95c-7aaf-4901-936b-d4d4f0f07c99) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19238 Sep 22 23:21:37.946 INFO [1] Transition from WaitActive to WaitQuorum
19239 Sep 22 23:21:37.946 WARN [1] new RM replaced this: None
19240 Sep 22 23:21:37.946 INFO [1] Starts reconcile loop
19241 Sep 22 23:21:37.946 INFO Repair listens on 127.0.0.1:0, task: repair
19242 Sep 22 23:21:37.946 INFO [2] c34debde-f046-44c6-9183-91356e8062e6 (1f51d95c-7aaf-4901-936b-d4d4f0f07c99) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19243 Sep 22 23:21:37.947 INFO [2] Transition from WaitActive to WaitQuorum
19244 Sep 22 23:21:37.947 WARN [2] new RM replaced this: None
19245 Sep 22 23:21:37.947 INFO [2] Starts reconcile loop
19246 Sep 22 23:21:37.947 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56257, task: repair
19247 Sep 22 23:21:37.947 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56257, task: repair
19248 Sep 22 23:21:37.947 INFO [0] 127.0.0.1:41387 task reports connection:true
19249 Sep 22 23:21:37.947 INFO c34debde-f046-44c6-9183-91356e8062e6 WaitQuorum WaitQuorum WaitQuorum
19250 Sep 22 23:21:37.947 INFO listening, local_addr: 127.0.0.1:56257, task: repair
19251 Sep 22 23:21:37.947 INFO [0]R flush_numbers: [0, 0]
19252 Sep 22 23:21:37.947 INFO [0]R generation: [0, 0]
19253 Sep 22 23:21:37.947 INFO [0]R dirty: [false, false]
19254 Sep 22 23:21:37.947 INFO [1]R flush_numbers: [0, 0]
19255 Sep 22 23:21:37.947 INFO [1]R generation: [0, 0]
19256 Sep 22 23:21:37.947 INFO [1]R dirty: [false, false]
19257 Sep 22 23:21:37.947 INFO [2]R flush_numbers: [0, 0]
19258 Sep 22 23:21:37.947 INFO [2]R generation: [0, 0]
19259 Sep 22 23:21:37.947 INFO [2]R dirty: [false, false]
19260 Sep 22 23:21:37.947 INFO Max found gen is 1
19261 Sep 22 23:21:37.947 INFO Generation requested: 1 >= found:1
19262 Sep 22 23:21:37.947 INFO current number of open files limit 65536 is already the maximum
19263 Sep 22 23:21:37.947 INFO Next flush: 1
19264 Sep 22 23:21:37.947 INFO Opened existing region file "/tmp/downstairs-YjjeJN3C/region.json"
19265 Sep 22 23:21:37.947 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56257, task: repair
19266 Sep 22 23:21:37.947 INFO All extents match
19267 Sep 22 23:21:37.947 INFO Database read version 1
19268 Sep 22 23:21:37.947 INFO No downstairs repair required
19269 Sep 22 23:21:37.947 INFO Database write version 1
19270 Sep 22 23:21:37.947 INFO Using repair address: 127.0.0.1:56257, task: main
19271 Sep 22 23:21:37.947 INFO No initial repair work was required
19272 Sep 22 23:21:37.947 INFO No SSL acceptor configured, task: main
19273 Sep 22 23:21:37.947 INFO Set Downstairs and Upstairs active
19274 Sep 22 23:21:37.947 INFO c34debde-f046-44c6-9183-91356e8062e6 is now active with session: 1f51d95c-7aaf-4901-936b-d4d4f0f07c99
19275 Sep 22 23:21:37.947 INFO c34debde-f046-44c6-9183-91356e8062e6 Set Active after no repair
19276 Sep 22 23:21:37.947 INFO Notify all downstairs, region set compare is done.
19277 Sep 22 23:21:37.947 INFO Set check for repair
19278 Sep 22 23:21:37.947 INFO [1] 127.0.0.1:58081 task reports connection:true
19279 Sep 22 23:21:37.947 INFO c34debde-f046-44c6-9183-91356e8062e6 Active Active Active
19280 Sep 22 23:21:37.947 INFO Set check for repair
19281 Sep 22 23:21:37.947 INFO [2] 127.0.0.1:39334 task reports connection:true
19282 Sep 22 23:21:37.947 INFO c34debde-f046-44c6-9183-91356e8062e6 Active Active Active
19283 Sep 22 23:21:37.947 INFO Set check for repair
19284 Sep 22 23:21:37.947 INFO [0] received reconcile message
19285 Sep 22 23:21:37.947 INFO [0] All repairs completed, exit
19286 Sep 22 23:21:37.947 INFO [0] Starts cmd_loop
19287 Sep 22 23:21:37.947 INFO [1] received reconcile message
19288 Sep 22 23:21:37.947 INFO [1] All repairs completed, exit
19289 Sep 22 23:21:37.947 INFO [1] Starts cmd_loop
19290 Sep 22 23:21:37.947 INFO Connection request from 8db920ae-ce39-4f1d-b015-8a9c440a0a0a with version 4, task: proc
19291 Sep 22 23:21:37.947 INFO current number of open files limit 65536 is already the maximum
19292 Sep 22 23:21:37.947 INFO upstairs UpstairsConnection { upstairs_id: 8db920ae-ce39-4f1d-b015-8a9c440a0a0a, session_id: 10e6c626-e9aa-4c08-8604-f77440ade166, gen: 1 } connected, version 4, task: proc
19293 Sep 22 23:21:37.947 INFO Opened existing region file "/tmp/downstairs-m476cQbB/region.json"
19294 Sep 22 23:21:37.947 INFO [2] received reconcile message
19295 Sep 22 23:21:37.948 INFO Database read version 1
19296 Sep 22 23:21:37.948 INFO [2] All repairs completed, exit
19297 Sep 22 23:21:37.948 INFO Database write version 1
19298 Sep 22 23:21:37.948 INFO [2] Starts cmd_loop
19299 Sep 22 23:21:37.948 INFO Connection request from 8db920ae-ce39-4f1d-b015-8a9c440a0a0a with version 4, task: proc
19300 The guest has finished waiting for activation
19301 Sep 22 23:21:37.948 INFO upstairs UpstairsConnection { upstairs_id: 8db920ae-ce39-4f1d-b015-8a9c440a0a0a, session_id: 10e6c626-e9aa-4c08-8604-f77440ade166, gen: 1 } connected, version 4, task: proc
19302 Sep 22 23:21:37.948 DEBG [0] Read AckReady 1000, : downstairs
19303 Sep 22 23:21:37.948 INFO Connection request from 8db920ae-ce39-4f1d-b015-8a9c440a0a0a with version 4, task: proc
19304 Sep 22 23:21:37.948 INFO upstairs UpstairsConnection { upstairs_id: 8db920ae-ce39-4f1d-b015-8a9c440a0a0a, session_id: 10e6c626-e9aa-4c08-8604-f77440ade166, gen: 1 } connected, version 4, task: proc
19305 Sep 22 23:21:37.948 WARN c34debde-f046-44c6-9183-91356e8062e6 request to replace downstairs 127.0.0.1:41387 with 127.0.0.1:58081
19306 Sep 22 23:21:37.948 INFO listening on 127.0.0.1:0, task: main
19307 Sep 22 23:21:37.948 INFO c34debde-f046-44c6-9183-91356e8062e6 found old target: 127.0.0.1:41387 at 0
19308 Sep 22 23:21:37.948 INFO c34debde-f046-44c6-9183-91356e8062e6 found new target: 127.0.0.1:58081 at 1
19309 Sep 22 23:21:37.948 WARN 8db920ae-ce39-4f1d-b015-8a9c440a0a0a request to replace downstairs 127.0.0.1:60187 with 127.0.0.1:42374
19310 Sep 22 23:21:37.948 INFO 8db920ae-ce39-4f1d-b015-8a9c440a0a0a found old target: 127.0.0.1:60187 at 0
19311 Sep 22 23:21:37.948 INFO 8db920ae-ce39-4f1d-b015-8a9c440a0a0a replacing old: 127.0.0.1:60187 at 0
19312 Sep 22 23:21:37.948 INFO [0] client skip 0 in process jobs because fault, : downstairs
19313 Sep 22 23:21:37.948 INFO [0] changed 0 jobs to fault skipped, : downstairs
19314 Sep 22 23:21:37.948 INFO [0] 8db920ae-ce39-4f1d-b015-8a9c440a0a0a (10e6c626-e9aa-4c08-8604-f77440ade166) New New New ds_transition to Replacing
19315 Sep 22 23:21:37.948 WARN c34debde-f046-44c6-9183-91356e8062e6 request to replace downstairs 127.0.0.1:58081 with 127.0.0.1:41387
19316 Sep 22 23:21:37.948 INFO [0] Transition from New to Replacing
19317 Sep 22 23:21:37.948 INFO c34debde-f046-44c6-9183-91356e8062e6 found new target: 127.0.0.1:41387 at 0
19318 Sep 22 23:21:37.948 INFO c34debde-f046-44c6-9183-91356e8062e6 found old target: 127.0.0.1:58081 at 1
19319 Sep 22 23:21:37.950 INFO UUID: a8a89756-4dfe-4289-90cc-a00c710e1634
19320 Sep 22 23:21:37.950 INFO Blocks per extent:5 Total Extents: 2
19321 Sep 22 23:21:37.950 INFO Crucible Version: Crucible Version: 0.0.1
19322 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19323 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19324 rustc: 1.70.0 stable x86_64-unknown-illumos
19325 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19326 Sep 22 23:21:37.950 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19327 Sep 22 23:21:37.950 INFO Using address: 127.0.0.1:37739, task: main
19328 Sep 22 23:21:37.950 DEBG [1] Read already AckReady 1000, : downstairs
19329 Sep 22 23:21:37.951 INFO Repair listens on 127.0.0.1:0, task: repair
19330 Sep 22 23:21:37.951 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48197, task: repair
19331 Sep 22 23:21:37.951 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48197, task: repair
19332 Sep 22 23:21:37.951 INFO listening, local_addr: 127.0.0.1:48197, task: repair
19333 Sep 22 23:21:37.951 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48197, task: repair
19334 Sep 22 23:21:37.951 INFO Using repair address: 127.0.0.1:48197, task: main
19335 Sep 22 23:21:37.951 INFO No SSL acceptor configured, task: main
19336 Sep 22 23:21:37.951 INFO UUID: fc6d0cf8-ea6a-447c-b146-12fc49b9f0fc
19337 Sep 22 23:21:37.951 INFO Blocks per extent:5 Total Extents: 2
19338 Sep 22 23:21:37.951 INFO current number of open files limit 65536 is already the maximum
19339 Sep 22 23:21:37.952 INFO Created new region file "/tmp/downstairs-sOjaheCu/region.json"
19340 Sep 22 23:21:37.952 INFO Crucible Version: Crucible Version: 0.0.1
19341 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19342 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19343 rustc: 1.70.0 stable x86_64-unknown-illumos
19344 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19345 Sep 22 23:21:37.952 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19346 Sep 22 23:21:37.952 INFO Using address: 127.0.0.1:49673, task: main
19347 Sep 22 23:21:37.952 INFO Repair listens on 127.0.0.1:0, task: repair
19348 Sep 22 23:21:37.952 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63439, task: repair
19349 Sep 22 23:21:37.952 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63439, task: repair
19350 Sep 22 23:21:37.952 INFO listening, local_addr: 127.0.0.1:63439, task: repair
19351 Sep 22 23:21:37.952 DEBG [2] Read already AckReady 1000, : downstairs
19352 Sep 22 23:21:37.953 DEBG up_ds_listen was notified
19353 Sep 22 23:21:37.953 DEBG up_ds_listen process 1000
19354 Sep 22 23:21:37.953 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63439, task: repair
19355 Sep 22 23:21:37.953 DEBG [A] ack job 1000:1, : downstairs
19356 Sep 22 23:21:37.953 INFO Using repair address: 127.0.0.1:63439, task: main
19357 Sep 22 23:21:37.953 INFO No SSL acceptor configured, task: main
19358 test test::integration_test_volume_replace_active ... ok
19359 Sep 22 23:21:37.953 INFO current number of open files limit 65536 is already the maximum
19360 Sep 22 23:21:37.953 INFO current number of open files limit 65536 is already the maximum
19361 Sep 22 23:21:37.953 INFO Created new region file "/tmp/downstairs-bHFzvnWo/region.json"
19362 Sep 22 23:21:37.953 DEBG up_ds_listen checked 1 jobs, back to waiting
19363 Sep 22 23:21:37.953 INFO Created new region file "/tmp/downstairs-wZJpXBn2/region.json"
19364 test test::integration_test_volume_inactive_replace_downstairs ... ok
19365 Sep 22 23:21:37.954 DEBG IO Flush 1001 has deps [JobId(1000)]
19366 Sep 22 23:21:37.954 INFO current number of open files limit 65536 is already the maximum
19367 Sep 22 23:21:37.954 INFO Created new region file "/tmp/downstairs-iZzJ56Ro/region.json"
19368 Sep 22 23:21:37.955 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:3
19369 Sep 22 23:21:37.955 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:3
19370 Sep 22 23:21:37.955 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:3
19371 Sep 22 23:21:37.955 DEBG up_ds_listen was notified
19372 Sep 22 23:21:37.955 DEBG up_ds_listen process 1001
19373 Sep 22 23:21:37.955 DEBG [A] ack job 1001:2, : downstairs
19374 Sep 22 23:21:37.955 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
19375 Sep 22 23:21:37.955 DEBG up_ds_listen checked 1 jobs, back to waiting
19376 Sep 22 23:21:37.955 INFO current number of open files limit 65536 is already the maximum
19377 Sep 22 23:21:37.956 INFO Opened existing region file "/tmp/downstairs-sOjaheCu/region.json"
19378 Sep 22 23:21:37.956 INFO Database read version 1
19379 Sep 22 23:21:37.956 INFO Database write version 1
19380 Sep 22 23:21:37.956 DEBG IO Write 1002 has deps []
19381 Sep 22 23:21:37.956 DEBG up_ds_listen was notified
19382 Sep 22 23:21:37.956 DEBG up_ds_listen process 1002
19383 Sep 22 23:21:37.956 DEBG [A] ack job 1002:3, : downstairs
19384 Sep 22 23:21:37.956 DEBG up_ds_listen checked 1 jobs, back to waiting
19385 Sep 22 23:21:37.956 INFO current number of open files limit 65536 is already the maximum
19386 Sep 22 23:21:37.956 INFO Opened existing region file "/tmp/downstairs-bHFzvnWo/region.json"
19387 Sep 22 23:21:37.956 INFO Database read version 1
19388 Sep 22 23:21:37.956 INFO Database write version 1
19389 Sep 22 23:21:37.957 INFO UUID: 6e7dc3e7-8b4b-4591-9b7c-15aab164f030
19390 Sep 22 23:21:37.957 INFO Blocks per extent:5 Total Extents: 2
19391 Sep 22 23:21:37.957 INFO Crucible Version: Crucible Version: 0.0.1
19392 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19393 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19394 rustc: 1.70.0 stable x86_64-unknown-illumos
19395 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19396 Sep 22 23:21:37.957 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19397 Sep 22 23:21:37.957 INFO Using address: 127.0.0.1:49483, task: main
19398 Sep 22 23:21:37.957 INFO Repair listens on 127.0.0.1:0, task: repair
19399 Sep 22 23:21:37.957 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63219, task: repair
19400 Sep 22 23:21:37.957 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63219, task: repair
19401 Sep 22 23:21:37.957 INFO listening, local_addr: 127.0.0.1:63219, task: repair
19402 Sep 22 23:21:37.957 DEBG Write :1002 deps:[] res:true
19403 Sep 22 23:21:37.957 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63219, task: repair
19404 Sep 22 23:21:37.957 INFO Using repair address: 127.0.0.1:63219, task: main
19405 Sep 22 23:21:37.957 INFO No SSL acceptor configured, task: main
19406 Sep 22 23:21:37.958 DEBG Write :1002 deps:[] res:true
19407 Sep 22 23:21:37.958 INFO Upstairs starts
19408 Sep 22 23:21:37.958 INFO Crucible Version: BuildInfo {
19409 version: "0.0.1",
19410 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19411 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19412 git_branch: "main",
19413 rustc_semver: "1.70.0",
19414 rustc_channel: "stable",
19415 rustc_host_triple: "x86_64-unknown-illumos",
19416 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19417 cargo_triple: "x86_64-unknown-illumos",
19418 debug: true,
19419 opt_level: 0,
19420 }
19421 Sep 22 23:21:37.958 INFO current number of open files limit 65536 is already the maximum
19422 Sep 22 23:21:37.958 INFO Upstairs <-> Downstairs Message Version: 4
19423 Sep 22 23:21:37.958 INFO Opened existing region file "/tmp/downstairs-iZzJ56Ro/region.json"
19424 Sep 22 23:21:37.958 INFO Crucible stats registered with UUID: eab93731-ef60-499c-b430-9f7d2df0dd78
19425 Sep 22 23:21:37.958 INFO Database read version 1
19426 Sep 22 23:21:37.958 INFO Crucible eab93731-ef60-499c-b430-9f7d2df0dd78 has session id: fa94d668-b6ae-4230-8587-284ca20af7fa
19427 Sep 22 23:21:37.958 INFO Database write version 1
19428 Sep 22 23:21:37.958 INFO UUID: a376e236-d708-4e3c-a463-d40be79bcc36
19429 Sep 22 23:21:37.958 INFO Blocks per extent:5 Total Extents: 2
19430 Sep 22 23:21:37.958 INFO listening on 127.0.0.1:0, task: main
19431 Sep 22 23:21:37.958 INFO Crucible Version: Crucible Version: 0.0.1
19432 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19433 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19434 rustc: 1.70.0 stable x86_64-unknown-illumos
19435 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19436 Sep 22 23:21:37.958 INFO listening on 127.0.0.1:0, task: main
19437 Sep 22 23:21:37.958 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19438 Sep 22 23:21:37.958 INFO listening on 127.0.0.1:0, task: main
19439 Sep 22 23:21:37.958 INFO Using address: 127.0.0.1:48309, task: main
19440 Sep 22 23:21:37.958 INFO [0] connecting to 127.0.0.1:57089, looper: 0
19441 Sep 22 23:21:37.958 DEBG Write :1002 deps:[] res:true
19442 Sep 22 23:21:37.958 INFO [1] connecting to 127.0.0.1:37739, looper: 1
19443 Sep 22 23:21:37.958 INFO [2] connecting to 127.0.0.1:49483, looper: 2
19444 Sep 22 23:21:37.958 INFO Repair listens on 127.0.0.1:0, task: repair
19445 Sep 22 23:21:37.958 INFO up_listen starts, task: up_listen
19446 Sep 22 23:21:37.958 INFO Wait for all three downstairs to come online
19447 Sep 22 23:21:37.958 INFO Flush timeout: 0.5
19448 Sep 22 23:21:37.958 DEBG IO Read 1003 has deps [JobId(1002)]
19449 Sep 22 23:21:37.958 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53407, task: repair
19450 Sep 22 23:21:37.959 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53407, task: repair
19451 Sep 22 23:21:37.959 INFO listening, local_addr: 127.0.0.1:53407, task: repair
19452 Sep 22 23:21:37.959 INFO accepted connection from 127.0.0.1:48338, task: main
19453 Sep 22 23:21:37.959 INFO accepted connection from 127.0.0.1:63069, task: main
19454 Sep 22 23:21:37.959 INFO accepted connection from 127.0.0.1:39626, task: main
19455 Sep 22 23:21:37.959 INFO [0] eab93731-ef60-499c-b430-9f7d2df0dd78 looper connected, looper: 0
19456 Sep 22 23:21:37.959 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53407, task: repair
19457 Sep 22 23:21:37.959 INFO [0] Proc runs for 127.0.0.1:57089 in state New
19458 Sep 22 23:21:37.959 INFO Using repair address: 127.0.0.1:53407, task: main
19459 Sep 22 23:21:37.959 INFO No SSL acceptor configured, task: main
19460 Sep 22 23:21:37.959 INFO [1] eab93731-ef60-499c-b430-9f7d2df0dd78 looper connected, looper: 1
19461 Sep 22 23:21:37.959 INFO [1] Proc runs for 127.0.0.1:37739 in state New
19462 Sep 22 23:21:37.959 INFO [2] eab93731-ef60-499c-b430-9f7d2df0dd78 looper connected, looper: 2
19463 Sep 22 23:21:37.959 INFO [2] Proc runs for 127.0.0.1:49483 in state New
19464 Sep 22 23:21:37.959 INFO Connection request from eab93731-ef60-499c-b430-9f7d2df0dd78 with version 4, task: proc
19465 Sep 22 23:21:37.959 INFO upstairs UpstairsConnection { upstairs_id: eab93731-ef60-499c-b430-9f7d2df0dd78, session_id: 2a665520-5c33-4466-a6bf-8df41383b368, gen: 1 } connected, version 4, task: proc
19466 Sep 22 23:21:37.959 INFO Connection request from eab93731-ef60-499c-b430-9f7d2df0dd78 with version 4, task: proc
19467 Sep 22 23:21:37.959 INFO upstairs UpstairsConnection { upstairs_id: eab93731-ef60-499c-b430-9f7d2df0dd78, session_id: 2a665520-5c33-4466-a6bf-8df41383b368, gen: 1 } connected, version 4, task: proc
19468 Sep 22 23:21:37.959 INFO Connection request from eab93731-ef60-499c-b430-9f7d2df0dd78 with version 4, task: proc
19469 Sep 22 23:21:37.959 INFO upstairs UpstairsConnection { upstairs_id: eab93731-ef60-499c-b430-9f7d2df0dd78, session_id: 2a665520-5c33-4466-a6bf-8df41383b368, gen: 1 } connected, version 4, task: proc
19470 Sep 22 23:21:37.959 DEBG Read :1003 deps:[JobId(1002)] res:true
19471 Sep 22 23:21:37.959 INFO Upstairs starts
19472 Sep 22 23:21:37.959 INFO Crucible Version: BuildInfo {
19473 version: "0.0.1",
19474 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19475 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19476 git_branch: "main",
19477 rustc_semver: "1.70.0",
19478 rustc_channel: "stable",
19479 rustc_host_triple: "x86_64-unknown-illumos",
19480 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19481 cargo_triple: "x86_64-unknown-illumos",
19482 debug: true,
19483 opt_level: 0,
19484 }
19485 Sep 22 23:21:37.959 INFO Upstairs <-> Downstairs Message Version: 4
19486 Sep 22 23:21:37.959 INFO Crucible stats registered with UUID: 52fe8d40-333d-4b64-8663-2adf0476947f
19487 The guest has requested activation
19488 Sep 22 23:21:37.959 INFO Crucible 52fe8d40-333d-4b64-8663-2adf0476947f has session id: 1000716e-0f09-43f5-b2f6-22b09aa4abd1
19489 Sep 22 23:21:37.960 INFO [0] eab93731-ef60-499c-b430-9f7d2df0dd78 (2a665520-5c33-4466-a6bf-8df41383b368) New New New ds_transition to WaitActive
19490 Sep 22 23:21:37.960 INFO [0] Transition from New to WaitActive
19491 Sep 22 23:21:37.960 INFO [1] eab93731-ef60-499c-b430-9f7d2df0dd78 (2a665520-5c33-4466-a6bf-8df41383b368) WaitActive New New ds_transition to WaitActive
19492 Sep 22 23:21:37.960 INFO [1] Transition from New to WaitActive
19493 Sep 22 23:21:37.960 DEBG Read :1003 deps:[JobId(1002)] res:true
19494 Sep 22 23:21:37.960 INFO listening on 127.0.0.1:0, task: main
19495 Sep 22 23:21:37.960 INFO [2] eab93731-ef60-499c-b430-9f7d2df0dd78 (2a665520-5c33-4466-a6bf-8df41383b368) WaitActive WaitActive New ds_transition to WaitActive
19496 Sep 22 23:21:37.960 INFO listening on 127.0.0.1:0, task: main
19497 Sep 22 23:21:37.960 INFO [2] Transition from New to WaitActive
19498 Sep 22 23:21:37.960 INFO listening on 127.0.0.1:0, task: main
19499 Sep 22 23:21:37.960 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 active request set
19500 Sep 22 23:21:37.960 INFO [0] connecting to 127.0.0.1:38958, looper: 0
19501 Sep 22 23:21:37.960 INFO [0] received activate with gen 1
19502 Sep 22 23:21:37.960 INFO [0] client got ds_active_rx, promote! session 2a665520-5c33-4466-a6bf-8df41383b368
19503 Sep 22 23:21:37.960 INFO [1] received activate with gen 1
19504 Sep 22 23:21:37.960 INFO [1] client got ds_active_rx, promote! session 2a665520-5c33-4466-a6bf-8df41383b368
19505 Sep 22 23:21:37.960 INFO [1] connecting to 127.0.0.1:49673, looper: 1
19506 Sep 22 23:21:37.960 INFO [2] received activate with gen 1
19507 Sep 22 23:21:37.960 DEBG Read :1003 deps:[JobId(1002)] res:true
19508 Sep 22 23:21:37.960 INFO [2] client got ds_active_rx, promote! session 2a665520-5c33-4466-a6bf-8df41383b368
19509 Sep 22 23:21:37.960 INFO [2] connecting to 127.0.0.1:48309, looper: 2
19510 Sep 22 23:21:37.960 INFO UpstairsConnection { upstairs_id: eab93731-ef60-499c-b430-9f7d2df0dd78, session_id: 2a665520-5c33-4466-a6bf-8df41383b368, gen: 1 } is now active (read-write)
19511 Sep 22 23:21:37.960 INFO UpstairsConnection { upstairs_id: eab93731-ef60-499c-b430-9f7d2df0dd78, session_id: 2a665520-5c33-4466-a6bf-8df41383b368, gen: 1 } is now active (read-write)
19512 Sep 22 23:21:37.960 INFO up_listen starts, task: up_listen
19513 Sep 22 23:21:37.960 INFO Wait for all three downstairs to come online
19514 Sep 22 23:21:37.960 INFO Flush timeout: 0.5
19515 Sep 22 23:21:37.960 INFO UpstairsConnection { upstairs_id: eab93731-ef60-499c-b430-9f7d2df0dd78, session_id: 2a665520-5c33-4466-a6bf-8df41383b368, gen: 1 } is now active (read-write)
19516 Sep 22 23:21:37.960 INFO UUID: 566f4276-dd26-4651-b2d1-ce64bf5a08b2
19517 Sep 22 23:21:37.960 INFO Blocks per extent:5 Total Extents: 2
19518 Sep 22 23:21:37.960 INFO Crucible Version: Crucible Version: 0.0.1
19519 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19520 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19521 rustc: 1.70.0 stable x86_64-unknown-illumos
19522 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19523 Sep 22 23:21:37.960 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19524 Sep 22 23:21:37.960 INFO accepted connection from 127.0.0.1:35663, task: main
19525 Sep 22 23:21:37.960 INFO Using address: 127.0.0.1:44460, task: main
19526 Sep 22 23:21:37.961 INFO accepted connection from 127.0.0.1:51009, task: main
19527 Sep 22 23:21:37.961 INFO [0] downstairs client at 127.0.0.1:57089 has UUID c0e11d40-fa1e-47cb-9523-4c205fda14a9
19528 Sep 22 23:21:37.961 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c0e11d40-fa1e-47cb-9523-4c205fda14a9, encrypted: true, database_read_version: 1, database_write_version: 1 }
19529 Sep 22 23:21:37.961 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 WaitActive WaitActive WaitActive
19530 Sep 22 23:21:37.961 INFO accepted connection from 127.0.0.1:33595, task: main
19531 Sep 22 23:21:37.961 INFO [1] downstairs client at 127.0.0.1:37739 has UUID a8a89756-4dfe-4289-90cc-a00c710e1634
19532 Sep 22 23:21:37.961 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a8a89756-4dfe-4289-90cc-a00c710e1634, encrypted: true, database_read_version: 1, database_write_version: 1 }
19533 Sep 22 23:21:37.961 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 WaitActive WaitActive WaitActive
19534 Sep 22 23:21:37.961 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f looper connected, looper: 0
19535 Sep 22 23:21:37.961 INFO [0] Proc runs for 127.0.0.1:38958 in state New
19536 Sep 22 23:21:37.961 INFO [2] downstairs client at 127.0.0.1:49483 has UUID 6e7dc3e7-8b4b-4591-9b7c-15aab164f030
19537 Sep 22 23:21:37.961 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6e7dc3e7-8b4b-4591-9b7c-15aab164f030, encrypted: true, database_read_version: 1, database_write_version: 1 }
19538 Sep 22 23:21:37.961 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 WaitActive WaitActive WaitActive
19539 Sep 22 23:21:37.961 INFO [1] 52fe8d40-333d-4b64-8663-2adf0476947f looper connected, looper: 1
19540 Sep 22 23:21:37.961 INFO [1] Proc runs for 127.0.0.1:49673 in state New
19541 Sep 22 23:21:37.961 INFO Current flush_numbers [0..12]: [0, 0]
19542 Sep 22 23:21:37.961 INFO [2] 52fe8d40-333d-4b64-8663-2adf0476947f looper connected, looper: 2
19543 Sep 22 23:21:37.961 INFO [2] Proc runs for 127.0.0.1:48309 in state New
19544 Sep 22 23:21:37.961 INFO Repair listens on 127.0.0.1:0, task: repair
19545 Sep 22 23:21:37.961 INFO Downstairs has completed Negotiation, task: proc
19546 Sep 22 23:21:37.961 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51286, task: repair
19547 Sep 22 23:21:37.961 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51286, task: repair
19548 Sep 22 23:21:37.961 INFO listening, local_addr: 127.0.0.1:51286, task: repair
19549 Sep 22 23:21:37.961 INFO Current flush_numbers [0..12]: [0, 0]
19550 Sep 22 23:21:37.961 INFO Connection request from 52fe8d40-333d-4b64-8663-2adf0476947f with version 4, task: proc
19551 Sep 22 23:21:37.961 INFO upstairs UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } connected, version 4, task: proc
19552 Sep 22 23:21:37.961 INFO Downstairs has completed Negotiation, task: proc
19553 Sep 22 23:21:37.961 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51286, task: repair
19554 Sep 22 23:21:37.961 INFO Using repair address: 127.0.0.1:51286, task: main
19555 Sep 22 23:21:37.961 INFO Connection request from 52fe8d40-333d-4b64-8663-2adf0476947f with version 4, task: proc
19556 Sep 22 23:21:37.962 INFO No SSL acceptor configured, task: main
19557 Sep 22 23:21:37.962 INFO upstairs UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } connected, version 4, task: proc
19558 Sep 22 23:21:37.962 INFO Current flush_numbers [0..12]: [0, 0]
19559 Sep 22 23:21:37.962 DEBG [0] Read AckReady 1003, : downstairs
19560 Sep 22 23:21:37.962 INFO Connection request from 52fe8d40-333d-4b64-8663-2adf0476947f with version 4, task: proc
19561 Sep 22 23:21:37.962 INFO upstairs UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } connected, version 4, task: proc
19562 Sep 22 23:21:37.962 INFO Downstairs has completed Negotiation, task: proc
19563 Sep 22 23:21:37.962 INFO current number of open files limit 65536 is already the maximum
19564 The guest has requested activation
19565 Sep 22 23:21:37.962 INFO Created new region file "/tmp/downstairs-NuYuZPJM/region.json"
19566 Sep 22 23:21:37.962 INFO [0] eab93731-ef60-499c-b430-9f7d2df0dd78 (2a665520-5c33-4466-a6bf-8df41383b368) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19567 Sep 22 23:21:37.962 INFO [0] Transition from WaitActive to WaitQuorum
19568 Sep 22 23:21:37.962 WARN [0] new RM replaced this: None
19569 Sep 22 23:21:37.962 INFO [0] Starts reconcile loop
19570 Sep 22 23:21:37.962 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) New New New ds_transition to WaitActive
19571 Sep 22 23:21:37.962 INFO [0] Transition from New to WaitActive
19572 Sep 22 23:21:37.962 INFO [1] eab93731-ef60-499c-b430-9f7d2df0dd78 (2a665520-5c33-4466-a6bf-8df41383b368) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19573 Sep 22 23:21:37.962 INFO [1] Transition from WaitActive to WaitQuorum
19574 Sep 22 23:21:37.962 WARN [1] new RM replaced this: None
19575 Sep 22 23:21:37.962 INFO [1] Starts reconcile loop
19576 Sep 22 23:21:37.962 INFO [1] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) WaitActive New New ds_transition to WaitActive
19577 Sep 22 23:21:37.962 INFO [1] Transition from New to WaitActive
19578 Sep 22 23:21:37.962 INFO [2] eab93731-ef60-499c-b430-9f7d2df0dd78 (2a665520-5c33-4466-a6bf-8df41383b368) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19579 Sep 22 23:21:37.962 INFO [2] Transition from WaitActive to WaitQuorum
19580 Sep 22 23:21:37.962 WARN [2] new RM replaced this: None
19581 Sep 22 23:21:37.962 DEBG [1] Read already AckReady 1003, : downstairs
19582 Sep 22 23:21:37.962 INFO [2] Starts reconcile loop
19583 Sep 22 23:21:37.962 INFO [0] 127.0.0.1:57089 task reports connection:true
19584 Sep 22 23:21:37.962 INFO [2] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) WaitActive WaitActive New ds_transition to WaitActive
19585 Sep 22 23:21:37.962 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 WaitQuorum WaitQuorum WaitQuorum
19586 Sep 22 23:21:37.962 INFO [2] Transition from New to WaitActive
19587 Sep 22 23:21:37.962 INFO [0]R flush_numbers: [0, 0]
19588 Sep 22 23:21:37.962 INFO [0]R generation: [0, 0]
19589 Sep 22 23:21:37.962 INFO [0]R dirty: [false, false]
19590 Sep 22 23:21:37.962 INFO [1]R flush_numbers: [0, 0]
19591 Sep 22 23:21:37.962 INFO [1]R generation: [0, 0]
19592 Sep 22 23:21:37.962 INFO [1]R dirty: [false, false]
19593 Sep 22 23:21:37.962 INFO [2]R flush_numbers: [0, 0]
19594 Sep 22 23:21:37.962 INFO 52fe8d40-333d-4b64-8663-2adf0476947f active request set
19595 Sep 22 23:21:37.962 INFO [2]R generation: [0, 0]
19596 Sep 22 23:21:37.962 INFO [2]R dirty: [false, false]
19597 Sep 22 23:21:37.962 INFO Max found gen is 1
19598 Sep 22 23:21:37.962 INFO Generation requested: 1 >= found:1
19599 Sep 22 23:21:37.962 INFO Next flush: 1
19600 Sep 22 23:21:37.962 INFO All extents match
19601 Sep 22 23:21:37.962 INFO No downstairs repair required
19602 Sep 22 23:21:37.962 INFO No initial repair work was required
19603 Sep 22 23:21:37.962 INFO [0] received activate with gen 1
19604 Sep 22 23:21:37.962 INFO Set Downstairs and Upstairs active
19605 Sep 22 23:21:37.962 INFO [0] client got ds_active_rx, promote! session fc0a5f14-ae91-4f48-aded-c2a3fecef8a0
19606 Sep 22 23:21:37.962 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 is now active with session: 2a665520-5c33-4466-a6bf-8df41383b368
19607 Sep 22 23:21:37.962 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 Set Active after no repair
19608 Sep 22 23:21:37.962 INFO Notify all downstairs, region set compare is done.
19609 Sep 22 23:21:37.962 INFO Set check for repair
19610 Sep 22 23:21:37.962 INFO [1] received activate with gen 1
19611 Sep 22 23:21:37.962 INFO [1] 127.0.0.1:37739 task reports connection:true
19612 Sep 22 23:21:37.962 INFO [1] client got ds_active_rx, promote! session fc0a5f14-ae91-4f48-aded-c2a3fecef8a0
19613 Sep 22 23:21:37.962 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 Active Active Active
19614 Sep 22 23:21:37.962 INFO Set check for repair
19615 Sep 22 23:21:37.962 INFO [2] 127.0.0.1:49483 task reports connection:true
19616 Sep 22 23:21:37.963 INFO eab93731-ef60-499c-b430-9f7d2df0dd78 Active Active Active
19617 Sep 22 23:21:37.963 INFO [2] received activate with gen 1
19618 Sep 22 23:21:37.963 INFO Set check for repair
19619 Sep 22 23:21:37.963 INFO [2] client got ds_active_rx, promote! session fc0a5f14-ae91-4f48-aded-c2a3fecef8a0
19620 Sep 22 23:21:37.963 INFO [0] received reconcile message
19621 Sep 22 23:21:37.963 DEBG [2] Read already AckReady 1003, : downstairs
19622 Sep 22 23:21:37.963 INFO [0] All repairs completed, exit
19623 Sep 22 23:21:37.963 DEBG up_ds_listen was notified
19624 Sep 22 23:21:37.963 DEBG up_ds_listen process 1003
19625 Sep 22 23:21:37.963 INFO [0] Starts cmd_loop
19626 Sep 22 23:21:37.963 INFO UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } is now active (read-write)
19627 Sep 22 23:21:37.963 DEBG [A] ack job 1003:4, : downstairs
19628 Sep 22 23:21:37.963 INFO [1] received reconcile message
19629 Sep 22 23:21:37.963 INFO [1] All repairs completed, exit
19630 Sep 22 23:21:37.963 INFO [1] Starts cmd_loop
19631 Sep 22 23:21:37.963 INFO UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } is now active (read-write)
19632 Sep 22 23:21:37.963 INFO [2] received reconcile message
19633 Sep 22 23:21:37.963 INFO [2] All repairs completed, exit
19634 Sep 22 23:21:37.963 INFO [2] Starts cmd_loop
19635 Sep 22 23:21:37.963 INFO UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } is now active (read-write)
19636 The guest has finished waiting for activation
19637 Sep 22 23:21:37.963 INFO current number of open files limit 65536 is already the maximum
19638 Sep 22 23:21:37.963 INFO Created new region file "/tmp/downstairs-DzLzCdxJ/region.json"
19639 Sep 22 23:21:37.963 DEBG up_ds_listen checked 1 jobs, back to waiting
19640 Sep 22 23:21:37.963 INFO [0] downstairs client at 127.0.0.1:38958 has UUID da052c76-0745-44df-96e3-d0a8c019994b
19641 Sep 22 23:21:37.963 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: da052c76-0745-44df-96e3-d0a8c019994b, encrypted: true, database_read_version: 1, database_write_version: 1 }
19642 Sep 22 23:21:37.963 DEBG IO Read 1001 has deps []
19643 Sep 22 23:21:37.963 INFO 52fe8d40-333d-4b64-8663-2adf0476947f WaitActive WaitActive WaitActive
19644 Sep 22 23:21:37.964 INFO [1] downstairs client at 127.0.0.1:49673 has UUID fc6d0cf8-ea6a-447c-b146-12fc49b9f0fc
19645 Sep 22 23:21:37.964 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fc6d0cf8-ea6a-447c-b146-12fc49b9f0fc, encrypted: true, database_read_version: 1, database_write_version: 1 }
19646 Sep 22 23:21:37.964 INFO 52fe8d40-333d-4b64-8663-2adf0476947f WaitActive WaitActive WaitActive
19647 Sep 22 23:21:37.964 INFO [2] downstairs client at 127.0.0.1:48309 has UUID a376e236-d708-4e3c-a463-d40be79bcc36
19648 Sep 22 23:21:37.964 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a376e236-d708-4e3c-a463-d40be79bcc36, encrypted: true, database_read_version: 1, database_write_version: 1 }
19649 Sep 22 23:21:37.964 INFO 52fe8d40-333d-4b64-8663-2adf0476947f WaitActive WaitActive WaitActive
19650 Sep 22 23:21:37.964 INFO Current flush_numbers [0..12]: [0, 0]
19651 Sep 22 23:21:37.965 DEBG Read :1001 deps:[] res:true
19652 Sep 22 23:21:37.965 INFO Downstairs has completed Negotiation, task: proc
19653 Sep 22 23:21:37.965 INFO Current flush_numbers [0..12]: [0, 0]
19654 Sep 22 23:21:37.965 DEBG Read :1001 deps:[] res:true
19655 Sep 22 23:21:37.965 INFO Downstairs has completed Negotiation, task: proc
19656 Sep 22 23:21:37.965 INFO Current flush_numbers [0..12]: [0, 0]
19657 Sep 22 23:21:37.966 DEBG Read :1001 deps:[] res:true
19658 Sep 22 23:21:37.966 INFO Downstairs has completed Negotiation, task: proc
19659 Sep 22 23:21:37.966 INFO current number of open files limit 65536 is already the maximum
19660 Sep 22 23:21:37.966 INFO Opened existing region file "/tmp/downstairs-NuYuZPJM/region.json"
19661 Sep 22 23:21:37.966 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19662 Sep 22 23:21:37.966 INFO Database read version 1
19663 Sep 22 23:21:37.966 INFO [0] Transition from WaitActive to WaitQuorum
19664 Sep 22 23:21:37.966 INFO Database write version 1
19665 Sep 22 23:21:37.966 WARN [0] new RM replaced this: None
19666 Sep 22 23:21:37.966 INFO [0] Starts reconcile loop
19667 Sep 22 23:21:37.966 INFO [1] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19668 Sep 22 23:21:37.966 INFO [1] Transition from WaitActive to WaitQuorum
19669 Sep 22 23:21:37.966 WARN [1] new RM replaced this: None
19670 Sep 22 23:21:37.966 INFO [1] Starts reconcile loop
19671 Sep 22 23:21:37.967 INFO [2] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19672 Sep 22 23:21:37.967 INFO [2] Transition from WaitActive to WaitQuorum
19673 Sep 22 23:21:37.967 WARN [2] new RM replaced this: None
19674 Sep 22 23:21:37.967 INFO [2] Starts reconcile loop
19675 Sep 22 23:21:37.967 INFO [0] 127.0.0.1:38958 task reports connection:true
19676 Sep 22 23:21:37.967 INFO current number of open files limit 65536 is already the maximum
19677 Sep 22 23:21:37.967 INFO Opened existing region file "/tmp/downstairs-Was11KKc/region.json"
19678 Sep 22 23:21:37.967 INFO 52fe8d40-333d-4b64-8663-2adf0476947f WaitQuorum WaitQuorum WaitQuorum
19679 Sep 22 23:21:37.967 INFO Database read version 1
19680 Sep 22 23:21:37.967 INFO Database write version 1
19681 Sep 22 23:21:37.967 INFO [0]R flush_numbers: [0, 0]
19682 Sep 22 23:21:37.967 INFO [0]R generation: [0, 0]
19683 Sep 22 23:21:37.967 INFO [0]R dirty: [false, false]
19684 Sep 22 23:21:37.967 INFO [1]R flush_numbers: [0, 0]
19685 Sep 22 23:21:37.967 INFO [1]R generation: [0, 0]
19686 Sep 22 23:21:37.967 INFO [1]R dirty: [false, false]
19687 Sep 22 23:21:37.967 INFO [2]R flush_numbers: [0, 0]
19688 Sep 22 23:21:37.967 INFO [2]R generation: [0, 0]
19689 Sep 22 23:21:37.967 INFO [2]R dirty: [false, false]
19690 Sep 22 23:21:37.967 INFO Max found gen is 1
19691 Sep 22 23:21:37.967 INFO Generation requested: 1 >= found:1
19692 Sep 22 23:21:37.967 INFO Next flush: 1
19693 Sep 22 23:21:37.967 INFO All extents match
19694 Sep 22 23:21:37.967 INFO No downstairs repair required
19695 Sep 22 23:21:37.967 INFO No initial repair work was required
19696 Sep 22 23:21:37.967 INFO Set Downstairs and Upstairs active
19697 Sep 22 23:21:37.967 INFO 52fe8d40-333d-4b64-8663-2adf0476947f is now active with session: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0
19698 Sep 22 23:21:37.967 INFO 52fe8d40-333d-4b64-8663-2adf0476947f Set Active after no repair
19699 Sep 22 23:21:37.967 INFO Notify all downstairs, region set compare is done.
19700 Sep 22 23:21:37.967 INFO Set check for repair
19701 Sep 22 23:21:37.967 INFO [1] 127.0.0.1:49673 task reports connection:true
19702 Sep 22 23:21:37.967 INFO 52fe8d40-333d-4b64-8663-2adf0476947f Active Active Active
19703 Sep 22 23:21:37.967 INFO Set check for repair
19704 Sep 22 23:21:37.967 INFO [2] 127.0.0.1:48309 task reports connection:true
19705 Sep 22 23:21:37.967 INFO 52fe8d40-333d-4b64-8663-2adf0476947f Active Active Active
19706 Sep 22 23:21:37.967 INFO Set check for repair
19707 Sep 22 23:21:37.967 INFO [0] received reconcile message
19708 Sep 22 23:21:37.967 INFO [0] All repairs completed, exit
19709 Sep 22 23:21:37.967 INFO [0] Starts cmd_loop
19710 Sep 22 23:21:37.967 INFO [1] received reconcile message
19711 Sep 22 23:21:37.968 INFO [1] All repairs completed, exit
19712 Sep 22 23:21:37.968 INFO [1] Starts cmd_loop
19713 Sep 22 23:21:37.968 INFO [2] received reconcile message
19714 Sep 22 23:21:37.968 INFO [2] All repairs completed, exit
19715 Sep 22 23:21:37.968 INFO [2] Starts cmd_loop
19716 Sep 22 23:21:37.968 INFO current number of open files limit 65536 is already the maximum
19717 Sep 22 23:21:37.968 INFO Opened existing region file "/tmp/downstairs-DzLzCdxJ/region.json"
19718 The guest has finished waiting for activation
19719 Sep 22 23:21:37.968 INFO Database read version 1
19720 Sep 22 23:21:37.968 INFO Database write version 1
19721 Sep 22 23:21:37.970 INFO UUID: 479a8ee4-cd0f-4e61-bc88-2060ae3b8bdc
19722 Sep 22 23:21:37.970 INFO Blocks per extent:5 Total Extents: 2
19723 Sep 22 23:21:37.970 INFO UUID: 7a4b0c61-160e-4afd-815e-9c0c3ef81d3d
19724 Sep 22 23:21:37.970 INFO Blocks per extent:5 Total Extents: 2
19725 Sep 22 23:21:37.970 INFO Crucible Version: Crucible Version: 0.0.1
19726 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19727 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19728 rustc: 1.70.0 stable x86_64-unknown-illumos
19729 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19730 Sep 22 23:21:37.970 INFO Crucible Version: Crucible Version: 0.0.1
19731 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19732 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19733 rustc: 1.70.0 stable x86_64-unknown-illumos
19734 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19735 Sep 22 23:21:37.970 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19736 Sep 22 23:21:37.970 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19737 Sep 22 23:21:37.970 INFO Using address: 127.0.0.1:61097, task: main
19738 Sep 22 23:21:37.970 INFO Using address: 127.0.0.1:32949, task: main
19739 Sep 22 23:21:37.970 INFO Repair listens on 127.0.0.1:0, task: repair
19740 Sep 22 23:21:37.970 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64906, task: repair
19741 Sep 22 23:21:37.970 INFO Repair listens on 127.0.0.1:0, task: repair
19742 Sep 22 23:21:37.970 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64906, task: repair
19743 Sep 22 23:21:37.970 INFO listening, local_addr: 127.0.0.1:64906, task: repair
19744 Sep 22 23:21:37.970 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44906, task: repair
19745 Sep 22 23:21:37.970 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44906, task: repair
19746 Sep 22 23:21:37.970 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64906, task: repair
19747 Sep 22 23:21:37.970 INFO listening, local_addr: 127.0.0.1:44906, task: repair
19748 Sep 22 23:21:37.970 DEBG [0] Read AckReady 1001, : downstairs
19749 Sep 22 23:21:37.971 INFO Using repair address: 127.0.0.1:64906, task: main
19750 Sep 22 23:21:37.971 INFO No SSL acceptor configured, task: main
19751 Sep 22 23:21:37.971 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44906, task: repair
19752 Sep 22 23:21:37.971 INFO Using repair address: 127.0.0.1:44906, task: main
19753 Sep 22 23:21:37.971 INFO No SSL acceptor configured, task: main
19754 Sep 22 23:21:37.971 INFO current number of open files limit 65536 is already the maximum
19755 Sep 22 23:21:37.971 INFO Created new region file "/tmp/downstairs-JXFR0BRF/region.json"
19756 Sep 22 23:21:37.971 INFO current number of open files limit 65536 is already the maximum
19757 Sep 22 23:21:37.971 DEBG IO Write 1000 has deps []
19758 Sep 22 23:21:37.971 INFO Created new region file "/tmp/downstairs-MD5F5T4f/region.json"
19759 Sep 22 23:21:37.971 DEBG up_ds_listen was notified
19760 Sep 22 23:21:37.971 DEBG up_ds_listen process 1000
19761 Sep 22 23:21:37.971 DEBG [A] ack job 1000:1, : downstairs
19762 Sep 22 23:21:37.971 DEBG up_ds_listen checked 1 jobs, back to waiting
19763 Sep 22 23:21:37.972 INFO current number of open files limit 65536 is already the maximum
19764 Sep 22 23:21:37.972 INFO Created new region file "/tmp/downstairs-kiLstbeI/region.json"
19765 Sep 22 23:21:37.973 DEBG [1] Read already AckReady 1001, : downstairs
19766 Sep 22 23:21:37.974 INFO current number of open files limit 65536 is already the maximum
19767 Sep 22 23:21:37.974 INFO Opened existing region file "/tmp/downstairs-JXFR0BRF/region.json"
19768 Sep 22 23:21:37.974 INFO Database read version 1
19769 Sep 22 23:21:37.974 INFO Database write version 1
19770 Sep 22 23:21:37.976 DEBG [2] Read already AckReady 1001, : downstairs
19771 Sep 22 23:21:37.976 DEBG up_ds_listen was notified
19772 Sep 22 23:21:37.976 DEBG up_ds_listen process 1001
19773 Sep 22 23:21:37.976 DEBG [A] ack job 1001:2, : downstairs
19774 Sep 22 23:21:37.976 INFO current number of open files limit 65536 is already the maximum
19775 Sep 22 23:21:37.976 INFO Opened existing region file "/tmp/downstairs-MD5F5T4f/region.json"
19776 Sep 22 23:21:37.976 INFO Database read version 1
19777 Sep 22 23:21:37.976 INFO Database write version 1
19778 Sep 22 23:21:37.977 DEBG up_ds_listen checked 1 jobs, back to waiting
19779 Sep 22 23:21:37.977 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002)]
19780 Sep 22 23:21:37.978 INFO UUID: e0100601-6e4a-4881-b022-fa3b270481de
19781 Sep 22 23:21:37.978 INFO Blocks per extent:5 Total Extents: 2
19782 Sep 22 23:21:37.978 INFO Crucible Version: Crucible Version: 0.0.1
19783 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19784 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19785 rustc: 1.70.0 stable x86_64-unknown-illumos
19786 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19787 Sep 22 23:21:37.978 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19788 Sep 22 23:21:37.978 INFO Using address: 127.0.0.1:47263, task: main
19789 Sep 22 23:21:37.978 INFO Repair listens on 127.0.0.1:0, task: repair
19790 Sep 22 23:21:37.978 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43250, task: repair
19791 Sep 22 23:21:37.978 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43250, task: repair
19792 Sep 22 23:21:37.978 INFO listening, local_addr: 127.0.0.1:43250, task: repair
19793 Sep 22 23:21:37.978 INFO current number of open files limit 65536 is already the maximum
19794 Sep 22 23:21:37.978 INFO Opened existing region file "/tmp/downstairs-kiLstbeI/region.json"
19795 Sep 22 23:21:37.978 INFO Database read version 1
19796 Sep 22 23:21:37.978 INFO Database write version 1
19797 Sep 22 23:21:37.978 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43250, task: repair
19798 Sep 22 23:21:37.978 INFO Using repair address: 127.0.0.1:43250, task: main
19799 Sep 22 23:21:37.978 INFO No SSL acceptor configured, task: main
19800 Sep 22 23:21:37.979 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002)] res:true f:2 g:3
19801 Sep 22 23:21:37.979 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002)] res:true f:2 g:3
19802 Sep 22 23:21:37.979 INFO listening on 127.0.0.1:0, task: main
19803 Sep 22 23:21:37.979 INFO listening on 127.0.0.1:0, task: main
19804 Sep 22 23:21:37.979 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002)] res:true f:2 g:3
19805 Sep 22 23:21:37.979 WARN eab93731-ef60-499c-b430-9f7d2df0dd78 request to replace downstairs 127.0.0.1:61097 with 127.0.0.1:47263
19806 Sep 22 23:21:37.979 WARN eab93731-ef60-499c-b430-9f7d2df0dd78 downstairs 127.0.0.1:61097 not found
19807 Sep 22 23:21:37.979 INFO UUID: 7d8c61fb-0fd3-430c-b1f3-5e7e68ad6607
19808 Sep 22 23:21:37.979 INFO Blocks per extent:5 Total Extents: 2
19809 Sep 22 23:21:37.979 INFO Crucible Version: Crucible Version: 0.0.1
19810 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19811 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19812 rustc: 1.70.0 stable x86_64-unknown-illumos
19813 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19814 Sep 22 23:21:37.979 DEBG up_ds_listen was notified
19815 Sep 22 23:21:37.979 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19816 Sep 22 23:21:37.979 INFO Using address: 127.0.0.1:53630, task: main
19817 Sep 22 23:21:37.979 DEBG up_ds_listen process 1004
19818 Sep 22 23:21:37.979 DEBG [A] ack job 1004:5, : downstairs
19819 Sep 22 23:21:37.979 DEBG [rc] retire 1004 clears [JobId(1002), JobId(1003), JobId(1004)], : downstairs
19820 Sep 22 23:21:37.979 DEBG up_ds_listen checked 1 jobs, back to waiting
19821 Sep 22 23:21:37.980 INFO Repair listens on 127.0.0.1:0, task: repair
19822 Sep 22 23:21:37.980 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48976, task: repair
19823 Sep 22 23:21:37.980 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48976, task: repair
19824 Sep 22 23:21:37.980 INFO listening, local_addr: 127.0.0.1:48976, task: repair
19825 Sep 22 23:21:37.980 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48976, task: repair
19826 Sep 22 23:21:37.980 INFO Using repair address: 127.0.0.1:48976, task: main
19827 Sep 22 23:21:37.980 INFO No SSL acceptor configured, task: main
19828 Sep 22 23:21:37.980 INFO current number of open files limit 65536 is already the maximum
19829 Sep 22 23:21:37.981 INFO Created new region file "/tmp/downstairs-42faY5Vy/region.json"
19830 Sep 22 23:21:37.981 INFO UUID: f9881bf1-86b5-438d-b94a-613d437493c1
19831 Sep 22 23:21:37.981 INFO Blocks per extent:5 Total Extents: 2
19832 Sep 22 23:21:37.981 INFO Crucible Version: Crucible Version: 0.0.1
19833 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19834 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19835 rustc: 1.70.0 stable x86_64-unknown-illumos
19836 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19837 Sep 22 23:21:37.981 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19838 Sep 22 23:21:37.981 INFO Using address: 127.0.0.1:62901, task: main
19839 Sep 22 23:21:37.982 INFO Repair listens on 127.0.0.1:0, task: repair
19840 Sep 22 23:21:37.982 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45053, task: repair
19841 Sep 22 23:21:37.982 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45053, task: repair
19842 Sep 22 23:21:37.982 INFO listening, local_addr: 127.0.0.1:45053, task: repair
19843 Sep 22 23:21:37.982 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45053, task: repair
19844 Sep 22 23:21:37.982 INFO Using repair address: 127.0.0.1:45053, task: main
19845 Sep 22 23:21:37.982 INFO No SSL acceptor configured, task: main
19846 Sep 22 23:21:37.984 INFO listening on 127.0.0.1:0, task: main
19847 Sep 22 23:21:37.984 WARN 52fe8d40-333d-4b64-8663-2adf0476947f request to replace downstairs 127.0.0.1:38958 with 127.0.0.1:62901
19848 Sep 22 23:21:37.984 INFO 52fe8d40-333d-4b64-8663-2adf0476947f found old target: 127.0.0.1:38958 at 0
19849 Sep 22 23:21:37.984 INFO 52fe8d40-333d-4b64-8663-2adf0476947f replacing old: 127.0.0.1:38958 at 0
19850 Sep 22 23:21:37.984 INFO [0] client skip 1 in process jobs because fault, : downstairs
19851 Sep 22 23:21:37.984 INFO [0] changed 1 jobs to fault skipped, : downstairs
19852 Sep 22 23:21:37.984 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) Active Active Active ds_transition to Replacing
19853 Sep 22 23:21:37.984 INFO [0] Transition from Active to Replacing
19854 test test::integration_test_volume_replace_bad_downstairs ... ok
19855 Sep 22 23:21:37.986 INFO current number of open files limit 65536 is already the maximum
19856 Sep 22 23:21:37.986 INFO Created new region file "/tmp/downstairs-GnOuEPSa/region.json"
19857 Sep 22 23:21:37.986 DEBG Write :1000 deps:[] res:true
19858 Sep 22 23:21:37.986 INFO current number of open files limit 65536 is already the maximum
19859 Sep 22 23:21:37.987 INFO Opened existing region file "/tmp/downstairs-42faY5Vy/region.json"
19860 Sep 22 23:21:37.987 INFO Database read version 1
19861 Sep 22 23:21:37.987 INFO Database write version 1
19862 Sep 22 23:21:37.988 DEBG Write :1000 deps:[] res:true
19863 Sep 22 23:21:37.988 INFO UUID: 3e5893e8-0146-44bc-9dcd-87f1c9321550
19864 Sep 22 23:21:37.988 INFO Blocks per extent:5 Total Extents: 2
19865 Sep 22 23:21:37.989 INFO Crucible Version: Crucible Version: 0.0.1
19866 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19867 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19868 rustc: 1.70.0 stable x86_64-unknown-illumos
19869 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19870 Sep 22 23:21:37.989 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19871 Sep 22 23:21:37.989 INFO Using address: 127.0.0.1:62409, task: main
19872 Sep 22 23:21:37.989 INFO Repair listens on 127.0.0.1:0, task: repair
19873 Sep 22 23:21:37.989 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49988, task: repair
19874 Sep 22 23:21:37.989 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49988, task: repair
19875 Sep 22 23:21:37.989 INFO listening, local_addr: 127.0.0.1:49988, task: repair
19876 Sep 22 23:21:37.989 DEBG Write :1000 deps:[] res:true
19877 Sep 22 23:21:37.989 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49988, task: repair
19878 Sep 22 23:21:37.989 INFO Using repair address: 127.0.0.1:49988, task: main
19879 Sep 22 23:21:37.989 INFO current number of open files limit 65536 is already the maximum
19880 Sep 22 23:21:37.989 INFO No SSL acceptor configured, task: main
19881 Sep 22 23:21:37.989 INFO Opened existing region file "/tmp/downstairs-GnOuEPSa/region.json"
19882 Sep 22 23:21:37.989 INFO Database read version 1
19883 Sep 22 23:21:37.989 INFO Database write version 1
19884 Sep 22 23:21:37.989 WARN [0] 52fe8d40-333d-4b64-8663-2adf0476947f WARNING finish job 1000 when downstairs state:Replacing
19885 Sep 22 23:21:37.989 WARN [0] Dropping already skipped job 1000, : downstairs
19886 Sep 22 23:21:37.990 WARN [0] will exit pm_task, this downstairs Replacing
19887 Sep 22 23:21:37.990 DEBG up_ds_listen was notified
19888 Sep 22 23:21:37.990 DEBG up_ds_listen checked 0 jobs, back to waiting
19889 Sep 22 23:21:37.990 ERRO 127.0.0.1:38958: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Replacing)), so we end too, looper: 0
19890 Sep 22 23:21:37.990 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f Gone missing, transition from Replacing to Replaced
19891 Sep 22 23:21:37.990 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f connection to 127.0.0.1:38958 closed, looper: 0
19892 Sep 22 23:21:37.990 INFO [0] 127.0.0.1:38958 task reports connection:false
19893 Sep 22 23:21:37.990 INFO 52fe8d40-333d-4b64-8663-2adf0476947f Replaced Active Active
19894 Sep 22 23:21:37.990 INFO [0] 127.0.0.1:38958 task reports offline
19895 Sep 22 23:21:37.990 INFO current number of open files limit 65536 is already the maximum
19896 Sep 22 23:21:37.990 WARN upstairs UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } disconnected, 0 jobs left, task: main
19897 Sep 22 23:21:37.990 WARN upstairs UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } was previously active, clearing, task: main
19898 Sep 22 23:21:37.990 INFO connection (127.0.0.1:51009): all done
19899 Sep 22 23:21:37.990 INFO Created new region file "/tmp/downstairs-DqCVu0Sw/region.json"
19900 test test::integration_test_snapshot_backed_vol ... ok
19901 Sep 22 23:21:37.991 INFO current number of open files limit 65536 is already the maximum
19902 Sep 22 23:21:37.991 INFO Created new region file "/tmp/downstairs-Km6l7HnV/region.json"
19903 Sep 22 23:21:37.991 INFO UUID: 5e565886-9774-4b9d-9633-93979cb0f8ae
19904 Sep 22 23:21:37.991 INFO Blocks per extent:5 Total Extents: 2
19905 Sep 22 23:21:37.991 INFO Crucible Version: Crucible Version: 0.0.1
19906 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19907 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19908 rustc: 1.70.0 stable x86_64-unknown-illumos
19909 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19910 Sep 22 23:21:37.991 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19911 Sep 22 23:21:37.991 INFO Using address: 127.0.0.1:49721, task: main
19912 Sep 22 23:21:37.991 INFO Repair listens on 127.0.0.1:0, task: repair
19913 Sep 22 23:21:37.991 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56432, task: repair
19914 Sep 22 23:21:37.992 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56432, task: repair
19915 Sep 22 23:21:37.992 INFO listening, local_addr: 127.0.0.1:56432, task: repair
19916 Sep 22 23:21:37.992 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56432, task: repair
19917 Sep 22 23:21:37.992 INFO Using repair address: 127.0.0.1:56432, task: main
19918 Sep 22 23:21:37.992 INFO No SSL acceptor configured, task: main
19919 Sep 22 23:21:37.992 INFO current number of open files limit 65536 is already the maximum
19920 Sep 22 23:21:37.993 INFO Created new region file "/tmp/downstairs-TlJziCp7/region.json"
19921 Sep 22 23:21:37.994 INFO current number of open files limit 65536 is already the maximum
19922 Sep 22 23:21:37.994 INFO current number of open files limit 65536 is already the maximum
19923 Sep 22 23:21:37.994 INFO Opened existing region file "/tmp/downstairs-DqCVu0Sw/region.json"
19924 Sep 22 23:21:37.994 INFO Opened existing region file "/tmp/downstairs-Km6l7HnV/region.json"
19925 Sep 22 23:21:37.994 INFO Database read version 1
19926 Sep 22 23:21:37.994 INFO Database read version 1
19927 Sep 22 23:21:37.994 INFO Database write version 1
19928 Sep 22 23:21:37.994 INFO Database write version 1
19929 Sep 22 23:21:37.996 INFO UUID: fe4a84c6-9d76-42b5-9360-3287766e9e73
19930 Sep 22 23:21:37.996 INFO Blocks per extent:5 Total Extents: 2
19931 Sep 22 23:21:37.996 INFO Crucible Version: Crucible Version: 0.0.1
19932 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19933 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19934 rustc: 1.70.0 stable x86_64-unknown-illumos
19935 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19936 Sep 22 23:21:37.996 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19937 Sep 22 23:21:37.996 INFO Using address: 127.0.0.1:64852, task: main
19938 Sep 22 23:21:37.996 INFO Repair listens on 127.0.0.1:0, task: repair
19939 Sep 22 23:21:37.996 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59280, task: repair
19940 Sep 22 23:21:37.996 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59280, task: repair
19941 Sep 22 23:21:37.996 INFO listening, local_addr: 127.0.0.1:59280, task: repair
19942 Sep 22 23:21:37.996 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59280, task: repair
19943 Sep 22 23:21:37.996 INFO Using repair address: 127.0.0.1:59280, task: main
19944 Sep 22 23:21:37.996 INFO No SSL acceptor configured, task: main
19945 Sep 22 23:21:37.997 INFO UUID: 1d55152f-d449-4fe0-aafd-0fc27bdcca9b
19946 Sep 22 23:21:37.997 INFO Blocks per extent:5 Total Extents: 2
19947 Sep 22 23:21:37.997 INFO current number of open files limit 65536 is already the maximum
19948 Sep 22 23:21:37.997 INFO current number of open files limit 65536 is already the maximum
19949 Sep 22 23:21:37.997 INFO Crucible Version: Crucible Version: 0.0.1
19950 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19951 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19952 rustc: 1.70.0 stable x86_64-unknown-illumos
19953 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19954 Sep 22 23:21:37.997 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19955 Sep 22 23:21:37.997 INFO Opened existing region file "/tmp/downstairs-TlJziCp7/region.json"
19956 Sep 22 23:21:37.997 INFO Database read version 1
19957 Sep 22 23:21:37.997 INFO Created new region file "/tmp/downstairs-W3ZwVj5O/region.json"
19958 Sep 22 23:21:37.997 INFO Using address: 127.0.0.1:58576, task: main
19959 Sep 22 23:21:37.997 INFO Database write version 1
19960 Sep 22 23:21:37.997 INFO Repair listens on 127.0.0.1:0, task: repair
19961 Sep 22 23:21:37.997 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61143, task: repair
19962 Sep 22 23:21:37.997 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61143, task: repair
19963 Sep 22 23:21:37.997 INFO listening, local_addr: 127.0.0.1:61143, task: repair
19964 Sep 22 23:21:37.997 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61143, task: repair
19965 Sep 22 23:21:37.997 INFO Using repair address: 127.0.0.1:61143, task: main
19966 Sep 22 23:21:37.998 INFO No SSL acceptor configured, task: main
19967 Sep 22 23:21:37.998 INFO current number of open files limit 65536 is already the maximum
19968 Sep 22 23:21:37.998 INFO Created new region file "/tmp/downstairs-lvGHo6t9/region.json"
19969 Sep 22 23:21:37.999 INFO UUID: 6c7e6774-c872-46de-b630-fdc717de2268
19970 Sep 22 23:21:37.999 INFO Blocks per extent:5 Total Extents: 2
19971 Sep 22 23:21:37.999 INFO Crucible Version: Crucible Version: 0.0.1
19972 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19973 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19974 rustc: 1.70.0 stable x86_64-unknown-illumos
19975 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19976 Sep 22 23:21:37.999 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19977 Sep 22 23:21:37.999 INFO Using address: 127.0.0.1:49229, task: main
19978 Sep 22 23:21:37.999 INFO Repair listens on 127.0.0.1:0, task: repair
19979 Sep 22 23:21:37.999 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57612, task: repair
19980 Sep 22 23:21:37.999 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57612, task: repair
19981 Sep 22 23:21:37.999 INFO listening, local_addr: 127.0.0.1:57612, task: repair
19982 Sep 22 23:21:38.000 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57612, task: repair
19983 Sep 22 23:21:38.000 INFO Using repair address: 127.0.0.1:57612, task: main
19984 Sep 22 23:21:38.000 INFO No SSL acceptor configured, task: main
19985 Sep 22 23:21:38.000 INFO current number of open files limit 65536 is already the maximum
19986 Sep 22 23:21:38.000 INFO Created new region file "/tmp/downstairs-LhVTHdCU/region.json"
19987 Sep 22 23:21:38.001 INFO current number of open files limit 65536 is already the maximum
19988 Sep 22 23:21:38.001 INFO Opened existing region file "/tmp/downstairs-W3ZwVj5O/region.json"
19989 Sep 22 23:21:38.001 INFO Database read version 1
19990 Sep 22 23:21:38.001 INFO Database write version 1
19991 Sep 22 23:21:38.004 INFO UUID: 2be75e90-0f81-4f50-bf88-09ce34abc850
19992 Sep 22 23:21:38.004 INFO Blocks per extent:5 Total Extents: 2
19993 Sep 22 23:21:38.004 INFO Crucible Version: Crucible Version: 0.0.1
19994 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19995 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19996 rustc: 1.70.0 stable x86_64-unknown-illumos
19997 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19998 Sep 22 23:21:38.004 INFO current number of open files limit 65536 is already the maximum
19999 Sep 22 23:21:38.004 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20000 Sep 22 23:21:38.004 INFO Opened existing region file "/tmp/downstairs-lvGHo6t9/region.json"
20001 Sep 22 23:21:38.004 INFO Using address: 127.0.0.1:55577, task: main
20002 Sep 22 23:21:38.004 INFO Database read version 1
20003 Sep 22 23:21:38.004 INFO Database write version 1
20004 Sep 22 23:21:38.004 INFO Repair listens on 127.0.0.1:0, task: repair
20005 Sep 22 23:21:38.004 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50556, task: repair
20006 Sep 22 23:21:38.004 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50556, task: repair
20007 Sep 22 23:21:38.004 INFO listening, local_addr: 127.0.0.1:50556, task: repair
20008 Sep 22 23:21:38.004 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50556, task: repair
20009 Sep 22 23:21:38.005 INFO Using repair address: 127.0.0.1:50556, task: main
20010 Sep 22 23:21:38.005 INFO No SSL acceptor configured, task: main
20011 Sep 22 23:21:38.005 INFO current number of open files limit 65536 is already the maximum
20012 Sep 22 23:21:38.005 INFO Opened existing region file "/tmp/downstairs-LhVTHdCU/region.json"
20013 Sep 22 23:21:38.005 INFO Database read version 1
20014 Sep 22 23:21:38.005 INFO Database write version 1
20015 Sep 22 23:21:38.005 INFO current number of open files limit 65536 is already the maximum
20016 Sep 22 23:21:38.005 INFO Created new region file "/tmp/downstairs-3VJmaWqa/region.json"
20017 Sep 22 23:21:38.008 INFO UUID: 709bf894-ad93-42e1-b7a3-127d2f1f8b04
20018 Sep 22 23:21:38.008 INFO Blocks per extent:5 Total Extents: 2
20019 Sep 22 23:21:38.008 INFO Crucible Version: Crucible Version: 0.0.1
20020 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20021 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20022 rustc: 1.70.0 stable x86_64-unknown-illumos
20023 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20024 Sep 22 23:21:38.008 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20025 Sep 22 23:21:38.008 INFO Using address: 127.0.0.1:59698, task: main
20026 Sep 22 23:21:38.008 INFO UUID: 1839dd14-6901-4a5d-8bee-dbdeaa422af1
20027 Sep 22 23:21:38.008 INFO Blocks per extent:5 Total Extents: 2
20028 Sep 22 23:21:38.009 INFO Crucible Version: Crucible Version: 0.0.1
20029 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20030 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20031 rustc: 1.70.0 stable x86_64-unknown-illumos
20032 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20033 Sep 22 23:21:38.009 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20034 Sep 22 23:21:38.009 INFO Using address: 127.0.0.1:55350, task: main
20035 Sep 22 23:21:38.009 INFO Repair listens on 127.0.0.1:0, task: repair
20036 Sep 22 23:21:38.009 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36399, task: repair
20037 Sep 22 23:21:38.009 INFO Repair listens on 127.0.0.1:0, task: repair
20038 Sep 22 23:21:38.009 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36399, task: repair
20039 Sep 22 23:21:38.009 INFO listening, local_addr: 127.0.0.1:36399, task: repair
20040 Sep 22 23:21:38.009 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60827, task: repair
20041 Sep 22 23:21:38.009 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60827, task: repair
20042 Sep 22 23:21:38.009 INFO listening, local_addr: 127.0.0.1:60827, task: repair
20043 Sep 22 23:21:38.009 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36399, task: repair
20044 Sep 22 23:21:38.009 INFO Using repair address: 127.0.0.1:36399, task: main
20045 Sep 22 23:21:38.009 INFO No SSL acceptor configured, task: main
20046 Sep 22 23:21:38.009 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60827, task: repair
20047 Sep 22 23:21:38.009 INFO Using repair address: 127.0.0.1:60827, task: main
20048 Sep 22 23:21:38.009 INFO No SSL acceptor configured, task: main
20049 Sep 22 23:21:38.010 INFO current number of open files limit 65536 is already the maximum
20050 Sep 22 23:21:38.010 INFO Created new region file "/tmp/downstairs-cCknzOql/region.json"
20051 Sep 22 23:21:38.010 INFO Upstairs starts
20052 Sep 22 23:21:38.010 INFO Crucible Version: BuildInfo {
20053 version: "0.0.1",
20054 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20055 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20056 git_branch: "main",
20057 rustc_semver: "1.70.0",
20058 rustc_channel: "stable",
20059 rustc_host_triple: "x86_64-unknown-illumos",
20060 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20061 cargo_triple: "x86_64-unknown-illumos",
20062 debug: true,
20063 opt_level: 0,
20064 }
20065 Sep 22 23:21:38.010 INFO Upstairs <-> Downstairs Message Version: 4
20066 Sep 22 23:21:38.010 INFO Crucible stats registered with UUID: 459d711a-572f-4bec-bee0-901ef0300807
20067 Sep 22 23:21:38.010 INFO Crucible 459d711a-572f-4bec-bee0-901ef0300807 has session id: 700a1ac5-add3-431a-ba59-75efa3b1f621
20068 Sep 22 23:21:38.010 INFO listening on 127.0.0.1:0, task: main
20069 Sep 22 23:21:38.010 INFO listening on 127.0.0.1:0, task: main
20070 Sep 22 23:21:38.010 INFO listening on 127.0.0.1:0, task: main
20071 Sep 22 23:21:38.010 INFO listening on 127.0.0.1:0, task: main
20072 Sep 22 23:21:38.011 INFO listening on 127.0.0.1:0, task: main
20073 Sep 22 23:21:38.011 INFO listening on 127.0.0.1:0, task: main
20074 Sep 22 23:21:38.011 INFO [0] connecting to 127.0.0.1:44460, looper: 0
20075 Sep 22 23:21:38.011 INFO [1] connecting to 127.0.0.1:32949, looper: 1
20076 Sep 22 23:21:38.011 INFO current number of open files limit 65536 is already the maximum
20077 Sep 22 23:21:38.011 INFO Opened existing region file "/tmp/downstairs-3VJmaWqa/region.json"
20078 Sep 22 23:21:38.011 INFO Database read version 1
20079 Sep 22 23:21:38.011 INFO Database write version 1
20080 Sep 22 23:21:38.011 INFO [2] connecting to 127.0.0.1:53630, looper: 2
20081 Sep 22 23:21:38.011 INFO up_listen starts, task: up_listen
20082 Sep 22 23:21:38.011 INFO Wait for all three downstairs to come online
20083 Sep 22 23:21:38.011 INFO Flush timeout: 0.5
20084 Sep 22 23:21:38.011 INFO accepted connection from 127.0.0.1:48984, task: main
20085 Sep 22 23:21:38.011 INFO accepted connection from 127.0.0.1:60768, task: main
20086 Sep 22 23:21:38.012 INFO accepted connection from 127.0.0.1:50875, task: main
20087 Sep 22 23:21:38.012 INFO [0] 459d711a-572f-4bec-bee0-901ef0300807 looper connected, looper: 0
20088 Sep 22 23:21:38.012 INFO [0] Proc runs for 127.0.0.1:44460 in state New
20089 Sep 22 23:21:38.012 INFO [1] 459d711a-572f-4bec-bee0-901ef0300807 looper connected, looper: 1
20090 Sep 22 23:21:38.012 INFO [1] Proc runs for 127.0.0.1:32949 in state New
20091 Sep 22 23:21:38.012 INFO [2] 459d711a-572f-4bec-bee0-901ef0300807 looper connected, looper: 2
20092 Sep 22 23:21:38.012 INFO [2] Proc runs for 127.0.0.1:53630 in state New
20093 Sep 22 23:21:38.012 INFO Upstairs starts
20094 Sep 22 23:21:38.012 INFO Crucible Version: BuildInfo {
20095 version: "0.0.1",
20096 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20097 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20098 git_branch: "main",
20099 rustc_semver: "1.70.0",
20100 rustc_channel: "stable",
20101 rustc_host_triple: "x86_64-unknown-illumos",
20102 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20103 cargo_triple: "x86_64-unknown-illumos",
20104 debug: true,
20105 opt_level: 0,
20106 }
20107 Sep 22 23:21:38.012 INFO Upstairs <-> Downstairs Message Version: 4
20108 Sep 22 23:21:38.012 INFO Crucible stats registered with UUID: 57a23afd-0f7e-4863-905a-9c435a295364
20109 Sep 22 23:21:38.012 INFO Crucible 57a23afd-0f7e-4863-905a-9c435a295364 has session id: 100195fc-8b85-43fc-8631-36ee6b1ec0c4
20110 Sep 22 23:21:38.013 INFO Connection request from 459d711a-572f-4bec-bee0-901ef0300807 with version 4, task: proc
20111 Sep 22 23:21:38.013 INFO upstairs UpstairsConnection { upstairs_id: 459d711a-572f-4bec-bee0-901ef0300807, session_id: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d, gen: 1 } connected, version 4, task: proc
20112 Sep 22 23:21:38.013 INFO Connection request from 459d711a-572f-4bec-bee0-901ef0300807 with version 4, task: proc
20113 Sep 22 23:21:38.013 INFO upstairs UpstairsConnection { upstairs_id: 459d711a-572f-4bec-bee0-901ef0300807, session_id: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d, gen: 1 } connected, version 4, task: proc
20114 Sep 22 23:21:38.013 INFO Connection request from 459d711a-572f-4bec-bee0-901ef0300807 with version 4, task: proc
20115 Sep 22 23:21:38.013 INFO upstairs UpstairsConnection { upstairs_id: 459d711a-572f-4bec-bee0-901ef0300807, session_id: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d, gen: 1 } connected, version 4, task: proc
20116 Sep 22 23:21:38.013 INFO [0] connecting to 127.0.0.1:62409, looper: 0
20117 Sep 22 23:21:38.013 INFO [1] connecting to 127.0.0.1:58576, looper: 1
20118 Sep 22 23:21:38.013 INFO [2] connecting to 127.0.0.1:55350, looper: 2
20119 Sep 22 23:21:38.013 INFO up_listen starts, task: up_listen
20120 Sep 22 23:21:38.013 INFO Wait for all three downstairs to come online
20121 Sep 22 23:21:38.013 INFO Flush timeout: 0.5
20122 Sep 22 23:21:38.013 INFO accepted connection from 127.0.0.1:39090, task: main
20123 Sep 22 23:21:38.013 INFO accepted connection from 127.0.0.1:56927, task: main
20124 Sep 22 23:21:38.014 INFO accepted connection from 127.0.0.1:44673, task: main
20125 Sep 22 23:21:38.014 INFO [0] 459d711a-572f-4bec-bee0-901ef0300807 (d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d) New New New ds_transition to WaitActive
20126 Sep 22 23:21:38.014 INFO UUID: 5366c239-e0fc-41ac-ab9f-b458e8730135
20127 Sep 22 23:21:38.014 INFO [0] Transition from New to WaitActive
20128 Sep 22 23:21:38.014 INFO Blocks per extent:5 Total Extents: 2
20129 Sep 22 23:21:38.014 INFO [1] 459d711a-572f-4bec-bee0-901ef0300807 (d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d) WaitActive New New ds_transition to WaitActive
20130 Sep 22 23:21:38.014 INFO Crucible Version: Crucible Version: 0.0.1
20131 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20132 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20133 rustc: 1.70.0 stable x86_64-unknown-illumos
20134 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20135 Sep 22 23:21:38.014 INFO [1] Transition from New to WaitActive
20136 Sep 22 23:21:38.014 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20137 Sep 22 23:21:38.014 INFO Using address: 127.0.0.1:49689, task: main
20138 Sep 22 23:21:38.014 INFO current number of open files limit 65536 is already the maximum
20139 Sep 22 23:21:38.014 INFO [2] 459d711a-572f-4bec-bee0-901ef0300807 (d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d) WaitActive WaitActive New ds_transition to WaitActive
20140 Sep 22 23:21:38.014 INFO Opened existing region file "/tmp/downstairs-cCknzOql/region.json"
20141 Sep 22 23:21:38.014 INFO [2] Transition from New to WaitActive
20142 Sep 22 23:21:38.014 INFO Database read version 1
20143 Sep 22 23:21:38.014 INFO Database write version 1
20144 Sep 22 23:21:38.014 INFO [0] 57a23afd-0f7e-4863-905a-9c435a295364 looper connected, looper: 0
20145 Sep 22 23:21:38.014 INFO [0] Proc runs for 127.0.0.1:62409 in state New
20146 Sep 22 23:21:38.014 INFO [1] 57a23afd-0f7e-4863-905a-9c435a295364 looper connected, looper: 1
20147 Sep 22 23:21:38.014 INFO [1] Proc runs for 127.0.0.1:58576 in state New
20148 Sep 22 23:21:38.014 INFO [2] 57a23afd-0f7e-4863-905a-9c435a295364 looper connected, looper: 2
20149 Sep 22 23:21:38.014 INFO [2] Proc runs for 127.0.0.1:55350 in state New
20150 Sep 22 23:21:38.014 INFO Repair listens on 127.0.0.1:0, task: repair
20151 Sep 22 23:21:38.014 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37669, task: repair
20152 Sep 22 23:21:38.014 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37669, task: repair
20153 Sep 22 23:21:38.014 INFO listening, local_addr: 127.0.0.1:37669, task: repair
20154 Sep 22 23:21:38.014 INFO Connection request from 57a23afd-0f7e-4863-905a-9c435a295364 with version 4, task: proc
20155 Sep 22 23:21:38.014 INFO upstairs UpstairsConnection { upstairs_id: 57a23afd-0f7e-4863-905a-9c435a295364, session_id: 465019ca-32d3-499c-bb90-0f856bcdf646, gen: 1 } connected, version 4, task: proc
20156 Sep 22 23:21:38.014 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37669, task: repair
20157 Sep 22 23:21:38.014 INFO Using repair address: 127.0.0.1:37669, task: main
20158 Sep 22 23:21:38.014 INFO No SSL acceptor configured, task: main
20159 Sep 22 23:21:38.014 INFO Connection request from 57a23afd-0f7e-4863-905a-9c435a295364 with version 4, task: proc
20160 Sep 22 23:21:38.014 INFO upstairs UpstairsConnection { upstairs_id: 57a23afd-0f7e-4863-905a-9c435a295364, session_id: 465019ca-32d3-499c-bb90-0f856bcdf646, gen: 1 } connected, version 4, task: proc
20161 Sep 22 23:21:38.015 INFO Connection request from 57a23afd-0f7e-4863-905a-9c435a295364 with version 4, task: proc
20162 Sep 22 23:21:38.015 INFO upstairs UpstairsConnection { upstairs_id: 57a23afd-0f7e-4863-905a-9c435a295364, session_id: 465019ca-32d3-499c-bb90-0f856bcdf646, gen: 1 } connected, version 4, task: proc
20163 Sep 22 23:21:38.015 INFO [0] 57a23afd-0f7e-4863-905a-9c435a295364 (465019ca-32d3-499c-bb90-0f856bcdf646) New New New ds_transition to WaitActive
20164 Sep 22 23:21:38.015 INFO [0] Transition from New to WaitActive
20165 Sep 22 23:21:38.015 INFO [1] 57a23afd-0f7e-4863-905a-9c435a295364 (465019ca-32d3-499c-bb90-0f856bcdf646) WaitActive New New ds_transition to WaitActive
20166 Sep 22 23:21:38.015 INFO [1] Transition from New to WaitActive
20167 Sep 22 23:21:38.015 INFO [2] 57a23afd-0f7e-4863-905a-9c435a295364 (465019ca-32d3-499c-bb90-0f856bcdf646) WaitActive WaitActive New ds_transition to WaitActive
20168 Sep 22 23:21:38.015 INFO [2] Transition from New to WaitActive
20169 The guest has requested activation
20170 Sep 22 23:21:38.015 INFO 459d711a-572f-4bec-bee0-901ef0300807 active request set
20171 Sep 22 23:21:38.015 INFO [0] received activate with gen 1
20172 Sep 22 23:21:38.015 INFO [0] client got ds_active_rx, promote! session d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d
20173 Sep 22 23:21:38.015 INFO Upstairs starts
20174 Sep 22 23:21:38.015 INFO Crucible Version: BuildInfo {
20175 version: "0.0.1",
20176 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20177 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20178 git_branch: "main",
20179 rustc_semver: "1.70.0",
20180 rustc_channel: "stable",
20181 rustc_host_triple: "x86_64-unknown-illumos",
20182 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20183 cargo_triple: "x86_64-unknown-illumos",
20184 debug: true,
20185 opt_level: 0,
20186 }
20187 Sep 22 23:21:38.015 INFO [1] received activate with gen 1
20188 Sep 22 23:21:38.015 INFO Upstairs <-> Downstairs Message Version: 4
20189 Sep 22 23:21:38.015 INFO [1] client got ds_active_rx, promote! session d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d
20190 Sep 22 23:21:38.015 INFO Crucible stats registered with UUID: 7aa67697-0340-40b6-98b6-fb2ed6103ed6
20191 Sep 22 23:21:38.015 INFO Crucible 7aa67697-0340-40b6-98b6-fb2ed6103ed6 has session id: ca9217d4-5a4f-4f73-90fb-a75120e3531d
20192 Sep 22 23:21:38.015 INFO [2] received activate with gen 1
20193 Sep 22 23:21:38.015 INFO [2] client got ds_active_rx, promote! session d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d
20194 Sep 22 23:21:38.015 INFO listening on 127.0.0.1:0, task: main
20195 Sep 22 23:21:38.015 INFO UpstairsConnection { upstairs_id: 459d711a-572f-4bec-bee0-901ef0300807, session_id: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d, gen: 1 } is now active (read-write)
20196 Sep 22 23:21:38.015 INFO listening on 127.0.0.1:0, task: main
20197 Sep 22 23:21:38.015 INFO listening on 127.0.0.1:0, task: main
20198 Sep 22 23:21:38.016 INFO UpstairsConnection { upstairs_id: 459d711a-572f-4bec-bee0-901ef0300807, session_id: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d, gen: 1 } is now active (read-write)
20199 Sep 22 23:21:38.016 INFO [0] connecting to 127.0.0.1:64852, looper: 0
20200 Sep 22 23:21:38.016 INFO UpstairsConnection { upstairs_id: 459d711a-572f-4bec-bee0-901ef0300807, session_id: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d, gen: 1 } is now active (read-write)
20201 Sep 22 23:21:38.016 INFO [1] connecting to 127.0.0.1:55577, looper: 1
20202 Sep 22 23:21:38.016 INFO [2] connecting to 127.0.0.1:49689, looper: 2
20203 Sep 22 23:21:38.016 INFO up_listen starts, task: up_listen
20204 Sep 22 23:21:38.016 INFO Wait for all three downstairs to come online
20205 Sep 22 23:21:38.016 INFO Flush timeout: 0.5
20206 Sep 22 23:21:38.016 INFO [0] downstairs client at 127.0.0.1:44460 has UUID 566f4276-dd26-4651-b2d1-ce64bf5a08b2
20207 Sep 22 23:21:38.016 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 566f4276-dd26-4651-b2d1-ce64bf5a08b2, encrypted: true, database_read_version: 1, database_write_version: 1 }
20208 Sep 22 23:21:38.016 INFO 459d711a-572f-4bec-bee0-901ef0300807 WaitActive WaitActive WaitActive
20209 Sep 22 23:21:38.016 INFO [1] downstairs client at 127.0.0.1:32949 has UUID 479a8ee4-cd0f-4e61-bc88-2060ae3b8bdc
20210 Sep 22 23:21:38.016 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 479a8ee4-cd0f-4e61-bc88-2060ae3b8bdc, encrypted: true, database_read_version: 1, database_write_version: 1 }
20211 Sep 22 23:21:38.016 INFO accepted connection from 127.0.0.1:53788, task: main
20212 Sep 22 23:21:38.016 INFO 459d711a-572f-4bec-bee0-901ef0300807 WaitActive WaitActive WaitActive
20213 Sep 22 23:21:38.016 INFO accepted connection from 127.0.0.1:52799, task: main
20214 Sep 22 23:21:38.016 INFO [2] downstairs client at 127.0.0.1:53630 has UUID 7d8c61fb-0fd3-430c-b1f3-5e7e68ad6607
20215 Sep 22 23:21:38.016 INFO accepted connection from 127.0.0.1:38518, task: main
20216 Sep 22 23:21:38.016 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7d8c61fb-0fd3-430c-b1f3-5e7e68ad6607, encrypted: true, database_read_version: 1, database_write_version: 1 }
20217 Sep 22 23:21:38.016 INFO 459d711a-572f-4bec-bee0-901ef0300807 WaitActive WaitActive WaitActive
20218 Sep 22 23:21:38.016 INFO [0] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 looper connected, looper: 0
20219 Sep 22 23:21:38.016 INFO [0] Proc runs for 127.0.0.1:64852 in state New
20220 Sep 22 23:21:38.016 INFO [1] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 looper connected, looper: 1
20221 Sep 22 23:21:38.016 INFO Current flush_numbers [0..12]: [0, 0]
20222 Sep 22 23:21:38.016 INFO [1] Proc runs for 127.0.0.1:55577 in state New
20223 Sep 22 23:21:38.016 INFO [2] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 looper connected, looper: 2
20224 Sep 22 23:21:38.016 INFO [2] Proc runs for 127.0.0.1:49689 in state New
20225 Sep 22 23:21:38.017 INFO Downstairs has completed Negotiation, task: proc
20226 Sep 22 23:21:38.017 INFO UUID: 137bc727-67de-4ee4-a302-cef52368a28d
20227 Sep 22 23:21:38.017 INFO Blocks per extent:5 Total Extents: 2
20228 Sep 22 23:21:38.017 INFO Current flush_numbers [0..12]: [0, 0]
20229 Sep 22 23:21:38.017 INFO Connection request from 7aa67697-0340-40b6-98b6-fb2ed6103ed6 with version 4, task: proc
20230 Sep 22 23:21:38.017 INFO Crucible Version: Crucible Version: 0.0.1
20231 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20232 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20233 rustc: 1.70.0 stable x86_64-unknown-illumos
20234 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20235 Sep 22 23:21:38.017 INFO upstairs UpstairsConnection { upstairs_id: 7aa67697-0340-40b6-98b6-fb2ed6103ed6, session_id: 9ec08283-1762-4efd-adf7-8c5fe20cabd0, gen: 1 } connected, version 4, task: proc
20236 Sep 22 23:21:38.017 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20237 Sep 22 23:21:38.017 INFO Using address: 127.0.0.1:60338, task: main
20238 Sep 22 23:21:38.017 INFO Downstairs has completed Negotiation, task: proc
20239 Sep 22 23:21:38.017 INFO Connection request from 7aa67697-0340-40b6-98b6-fb2ed6103ed6 with version 4, task: proc
20240 Sep 22 23:21:38.017 INFO upstairs UpstairsConnection { upstairs_id: 7aa67697-0340-40b6-98b6-fb2ed6103ed6, session_id: 9ec08283-1762-4efd-adf7-8c5fe20cabd0, gen: 1 } connected, version 4, task: proc
20241 Sep 22 23:21:38.017 INFO Connection request from 7aa67697-0340-40b6-98b6-fb2ed6103ed6 with version 4, task: proc
20242 Sep 22 23:21:38.017 INFO upstairs UpstairsConnection { upstairs_id: 7aa67697-0340-40b6-98b6-fb2ed6103ed6, session_id: 9ec08283-1762-4efd-adf7-8c5fe20cabd0, gen: 1 } connected, version 4, task: proc
20243 Sep 22 23:21:38.017 INFO Current flush_numbers [0..12]: [0, 0]
20244 The guest has requested activation
20245 Sep 22 23:21:38.017 INFO Downstairs has completed Negotiation, task: proc
20246 Sep 22 23:21:38.017 INFO [0] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) New New New ds_transition to WaitActive
20247 Sep 22 23:21:38.017 INFO [0] Transition from New to WaitActive
20248 Sep 22 23:21:38.017 INFO [1] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) WaitActive New New ds_transition to WaitActive
20249 Sep 22 23:21:38.017 INFO [1] Transition from New to WaitActive
20250 Sep 22 23:21:38.017 INFO [0] 459d711a-572f-4bec-bee0-901ef0300807 (d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20251 Sep 22 23:21:38.017 INFO [0] Transition from WaitActive to WaitQuorum
20252 Sep 22 23:21:38.017 WARN [0] new RM replaced this: None
20253 Sep 22 23:21:38.017 INFO Repair listens on 127.0.0.1:0, task: repair
20254 Sep 22 23:21:38.017 INFO [2] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) WaitActive WaitActive New ds_transition to WaitActive
20255 Sep 22 23:21:38.017 INFO [2] Transition from New to WaitActive
20256 Sep 22 23:21:38.017 INFO [0] Starts reconcile loop
20257 Sep 22 23:21:38.017 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 active request set
20258 Sep 22 23:21:38.017 INFO [1] 459d711a-572f-4bec-bee0-901ef0300807 (d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20259 Sep 22 23:21:38.017 INFO [1] Transition from WaitActive to WaitQuorum
20260 Sep 22 23:21:38.017 WARN [1] new RM replaced this: None
20261 Sep 22 23:21:38.017 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52052, task: repair
20262 Sep 22 23:21:38.017 INFO [0] received activate with gen 1
20263 Sep 22 23:21:38.017 INFO [1] Starts reconcile loop
20264 Sep 22 23:21:38.017 INFO [0] client got ds_active_rx, promote! session 9ec08283-1762-4efd-adf7-8c5fe20cabd0
20265 Sep 22 23:21:38.017 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52052, task: repair
20266 Sep 22 23:21:38.017 INFO [2] 459d711a-572f-4bec-bee0-901ef0300807 (d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20267 Sep 22 23:21:38.017 INFO [1] received activate with gen 1
20268 Sep 22 23:21:38.018 INFO [2] Transition from WaitActive to WaitQuorum
20269 Sep 22 23:21:38.018 INFO [1] client got ds_active_rx, promote! session 9ec08283-1762-4efd-adf7-8c5fe20cabd0
20270 Sep 22 23:21:38.018 WARN [2] new RM replaced this: None
20271 Sep 22 23:21:38.018 INFO listening, local_addr: 127.0.0.1:52052, task: repair
20272 Sep 22 23:21:38.018 INFO [2] Starts reconcile loop
20273 Sep 22 23:21:38.018 INFO [2] received activate with gen 1
20274 Sep 22 23:21:38.018 INFO [2] client got ds_active_rx, promote! session 9ec08283-1762-4efd-adf7-8c5fe20cabd0
20275 Sep 22 23:21:38.018 INFO [0] 127.0.0.1:44460 task reports connection:true
20276 Sep 22 23:21:38.018 INFO 459d711a-572f-4bec-bee0-901ef0300807 WaitQuorum WaitQuorum WaitQuorum
20277 Sep 22 23:21:38.018 INFO [0]R flush_numbers: [0, 0]
20278 Sep 22 23:21:38.018 INFO [0]R generation: [0, 0]
20279 Sep 22 23:21:38.018 INFO [0]R dirty: [false, false]
20280 Sep 22 23:21:38.018 INFO [1]R flush_numbers: [0, 0]
20281 Sep 22 23:21:38.018 INFO [1]R generation: [0, 0]
20282 Sep 22 23:21:38.018 INFO [1]R dirty: [false, false]
20283 Sep 22 23:21:38.018 INFO [2]R flush_numbers: [0, 0]
20284 Sep 22 23:21:38.018 INFO UpstairsConnection { upstairs_id: 7aa67697-0340-40b6-98b6-fb2ed6103ed6, session_id: 9ec08283-1762-4efd-adf7-8c5fe20cabd0, gen: 1 } is now active (read-write)
20285 Sep 22 23:21:38.018 INFO [2]R generation: [0, 0]
20286 Sep 22 23:21:38.018 INFO [2]R dirty: [false, false]
20287 Sep 22 23:21:38.018 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52052, task: repair
20288 Sep 22 23:21:38.018 INFO Max found gen is 1
20289 Sep 22 23:21:38.018 INFO Generation requested: 1 >= found:1
20290 Sep 22 23:21:38.018 INFO Next flush: 1
20291 Sep 22 23:21:38.018 INFO Using repair address: 127.0.0.1:52052, task: main
20292 Sep 22 23:21:38.018 INFO All extents match
20293 Sep 22 23:21:38.018 INFO No SSL acceptor configured, task: main
20294 Sep 22 23:21:38.018 INFO No downstairs repair required
20295 Sep 22 23:21:38.018 INFO UpstairsConnection { upstairs_id: 7aa67697-0340-40b6-98b6-fb2ed6103ed6, session_id: 9ec08283-1762-4efd-adf7-8c5fe20cabd0, gen: 1 } is now active (read-write)
20296 Sep 22 23:21:38.018 INFO No initial repair work was required
20297 Sep 22 23:21:38.018 INFO Set Downstairs and Upstairs active
20298 Sep 22 23:21:38.018 INFO 459d711a-572f-4bec-bee0-901ef0300807 is now active with session: d2b2fd2b-930d-4a52-b26d-41e04dc4ed1d
20299 Sep 22 23:21:38.018 INFO 459d711a-572f-4bec-bee0-901ef0300807 Set Active after no repair
20300 Sep 22 23:21:38.018 INFO Notify all downstairs, region set compare is done.
20301 Sep 22 23:21:38.018 INFO Set check for repair
20302 Sep 22 23:21:38.018 INFO UpstairsConnection { upstairs_id: 7aa67697-0340-40b6-98b6-fb2ed6103ed6, session_id: 9ec08283-1762-4efd-adf7-8c5fe20cabd0, gen: 1 } is now active (read-write)
20303 Sep 22 23:21:38.018 INFO [1] 127.0.0.1:32949 task reports connection:true
20304 Sep 22 23:21:38.018 INFO 459d711a-572f-4bec-bee0-901ef0300807 Active Active Active
20305 Sep 22 23:21:38.018 INFO Set check for repair
20306 Sep 22 23:21:38.018 INFO current number of open files limit 65536 is already the maximum
20307 Sep 22 23:21:38.018 INFO [2] 127.0.0.1:53630 task reports connection:true
20308 Sep 22 23:21:38.018 INFO 459d711a-572f-4bec-bee0-901ef0300807 Active Active Active
20309 Sep 22 23:21:38.018 INFO Set check for repair
20310 Sep 22 23:21:38.018 INFO [0] received reconcile message
20311 Sep 22 23:21:38.018 INFO [0] All repairs completed, exit
20312 Sep 22 23:21:38.018 INFO Created new region file "/tmp/downstairs-sqEOUB0C/region.json"
20313 Sep 22 23:21:38.018 INFO [0] Starts cmd_loop
20314 Sep 22 23:21:38.018 INFO [0] downstairs client at 127.0.0.1:64852 has UUID fe4a84c6-9d76-42b5-9360-3287766e9e73
20315 Sep 22 23:21:38.018 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fe4a84c6-9d76-42b5-9360-3287766e9e73, encrypted: true, database_read_version: 1, database_write_version: 1 }
20316 Sep 22 23:21:38.018 INFO [1] received reconcile message
20317 Sep 22 23:21:38.018 INFO [1] All repairs completed, exit
20318 Sep 22 23:21:38.018 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 WaitActive WaitActive WaitActive
20319 Sep 22 23:21:38.018 INFO [1] Starts cmd_loop
20320 Sep 22 23:21:38.018 INFO [2] received reconcile message
20321 Sep 22 23:21:38.018 INFO [1] downstairs client at 127.0.0.1:55577 has UUID 2be75e90-0f81-4f50-bf88-09ce34abc850
20322 Sep 22 23:21:38.018 INFO [2] All repairs completed, exit
20323 Sep 22 23:21:38.018 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2be75e90-0f81-4f50-bf88-09ce34abc850, encrypted: true, database_read_version: 1, database_write_version: 1 }
20324 Sep 22 23:21:38.018 INFO [2] Starts cmd_loop
20325 Sep 22 23:21:38.018 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 WaitActive WaitActive WaitActive
20326 The guest has finished waiting for activation
20327 Sep 22 23:21:38.018 INFO [2] downstairs client at 127.0.0.1:49689 has UUID 5366c239-e0fc-41ac-ab9f-b458e8730135
20328 Sep 22 23:21:38.018 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5366c239-e0fc-41ac-ab9f-b458e8730135, encrypted: true, database_read_version: 1, database_write_version: 1 }
20329 Sep 22 23:21:38.018 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 WaitActive WaitActive WaitActive
20330 The guest has requested activation
20331 Sep 22 23:21:38.019 INFO Current flush_numbers [0..12]: [0, 0]
20332 Sep 22 23:21:38.019 INFO 57a23afd-0f7e-4863-905a-9c435a295364 active request set
20333 Sep 22 23:21:38.019 INFO [0] received activate with gen 1
20334 Sep 22 23:21:38.019 INFO [0] client got ds_active_rx, promote! session 465019ca-32d3-499c-bb90-0f856bcdf646
20335 Sep 22 23:21:38.019 INFO Downstairs has completed Negotiation, task: proc
20336 Sep 22 23:21:38.019 INFO [1] received activate with gen 1
20337 Sep 22 23:21:38.019 INFO [1] client got ds_active_rx, promote! session 465019ca-32d3-499c-bb90-0f856bcdf646
20338 Sep 22 23:21:38.019 INFO [2] received activate with gen 1
20339 Sep 22 23:21:38.019 INFO [2] client got ds_active_rx, promote! session 465019ca-32d3-499c-bb90-0f856bcdf646
20340 Sep 22 23:21:38.019 INFO Current flush_numbers [0..12]: [0, 0]
20341 Sep 22 23:21:38.019 INFO UpstairsConnection { upstairs_id: 57a23afd-0f7e-4863-905a-9c435a295364, session_id: 465019ca-32d3-499c-bb90-0f856bcdf646, gen: 1 } is now active (read-write)
20342 Sep 22 23:21:38.019 INFO UpstairsConnection { upstairs_id: 57a23afd-0f7e-4863-905a-9c435a295364, session_id: 465019ca-32d3-499c-bb90-0f856bcdf646, gen: 1 } is now active (read-write)
20343 Sep 22 23:21:38.019 INFO Downstairs has completed Negotiation, task: proc
20344 Sep 22 23:21:38.019 INFO UpstairsConnection { upstairs_id: 57a23afd-0f7e-4863-905a-9c435a295364, session_id: 465019ca-32d3-499c-bb90-0f856bcdf646, gen: 1 } is now active (read-write)
20345 Sep 22 23:21:38.019 INFO Current flush_numbers [0..12]: [0, 0]
20346 Sep 22 23:21:38.019 INFO Downstairs has completed Negotiation, task: proc
20347 Sep 22 23:21:38.020 INFO [0] downstairs client at 127.0.0.1:62409 has UUID 3e5893e8-0146-44bc-9dcd-87f1c9321550
20348 Sep 22 23:21:38.020 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3e5893e8-0146-44bc-9dcd-87f1c9321550, encrypted: true, database_read_version: 1, database_write_version: 1 }
20349 Sep 22 23:21:38.020 INFO 57a23afd-0f7e-4863-905a-9c435a295364 WaitActive WaitActive WaitActive
20350 Sep 22 23:21:38.020 INFO [1] downstairs client at 127.0.0.1:58576 has UUID 1d55152f-d449-4fe0-aafd-0fc27bdcca9b
20351 Sep 22 23:21:38.020 INFO [0] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20352 Sep 22 23:21:38.020 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1d55152f-d449-4fe0-aafd-0fc27bdcca9b, encrypted: true, database_read_version: 1, database_write_version: 1 }
20353 Sep 22 23:21:38.020 INFO [0] Transition from WaitActive to WaitQuorum
20354 Sep 22 23:21:38.020 WARN [0] new RM replaced this: None
20355 Sep 22 23:21:38.020 INFO 57a23afd-0f7e-4863-905a-9c435a295364 WaitActive WaitActive WaitActive
20356 Sep 22 23:21:38.020 INFO [0] Starts reconcile loop
20357 Sep 22 23:21:38.020 INFO [2] downstairs client at 127.0.0.1:55350 has UUID 1839dd14-6901-4a5d-8bee-dbdeaa422af1
20358 Sep 22 23:21:38.020 INFO [1] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20359 Sep 22 23:21:38.020 INFO [1] Transition from WaitActive to WaitQuorum
20360 Sep 22 23:21:38.020 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1839dd14-6901-4a5d-8bee-dbdeaa422af1, encrypted: true, database_read_version: 1, database_write_version: 1 }
20361 Sep 22 23:21:38.020 WARN [1] new RM replaced this: None
20362 Sep 22 23:21:38.020 INFO [1] Starts reconcile loop
20363 Sep 22 23:21:38.020 INFO 57a23afd-0f7e-4863-905a-9c435a295364 WaitActive WaitActive WaitActive
20364 Sep 22 23:21:38.020 INFO [2] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20365 Sep 22 23:21:38.020 INFO [2] Transition from WaitActive to WaitQuorum
20366 Sep 22 23:21:38.020 WARN [2] new RM replaced this: None
20367 Sep 22 23:21:38.020 INFO [2] Starts reconcile loop
20368 Sep 22 23:21:38.020 INFO [0] 127.0.0.1:64852 task reports connection:true
20369 Sep 22 23:21:38.020 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 WaitQuorum WaitQuorum WaitQuorum
20370 Sep 22 23:21:38.020 INFO Current flush_numbers [0..12]: [0, 0]
20371 Sep 22 23:21:38.020 INFO [0]R flush_numbers: [0, 0]
20372 Sep 22 23:21:38.020 INFO [0]R generation: [0, 0]
20373 Sep 22 23:21:38.020 INFO [0]R dirty: [false, false]
20374 Sep 22 23:21:38.020 INFO [1]R flush_numbers: [0, 0]
20375 Sep 22 23:21:38.020 INFO [1]R generation: [0, 0]
20376 Sep 22 23:21:38.020 INFO [1]R dirty: [false, false]
20377 Sep 22 23:21:38.020 INFO [2]R flush_numbers: [0, 0]
20378 Sep 22 23:21:38.020 INFO [2]R generation: [0, 0]
20379 Sep 22 23:21:38.020 INFO [2]R dirty: [false, false]
20380 Sep 22 23:21:38.020 INFO Max found gen is 1
20381 Sep 22 23:21:38.020 INFO Generation requested: 1 >= found:1
20382 Sep 22 23:21:38.020 INFO Downstairs has completed Negotiation, task: proc
20383 Sep 22 23:21:38.020 INFO Next flush: 1
20384 Sep 22 23:21:38.020 INFO All extents match
20385 Sep 22 23:21:38.020 INFO No downstairs repair required
20386 Sep 22 23:21:38.020 INFO No initial repair work was required
20387 Sep 22 23:21:38.020 INFO Set Downstairs and Upstairs active
20388 Sep 22 23:21:38.020 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 is now active with session: 9ec08283-1762-4efd-adf7-8c5fe20cabd0
20389 Sep 22 23:21:38.020 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 Set Active after no repair
20390 Sep 22 23:21:38.020 INFO Notify all downstairs, region set compare is done.
20391 Sep 22 23:21:38.020 INFO Set check for repair
20392 Sep 22 23:21:38.020 INFO [1] 127.0.0.1:55577 task reports connection:true
20393 Sep 22 23:21:38.020 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 Active Active Active
20394 Sep 22 23:21:38.020 INFO Set check for repair
20395 Sep 22 23:21:38.020 INFO [2] 127.0.0.1:49689 task reports connection:true
20396 Sep 22 23:21:38.020 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 Active Active Active
20397 Sep 22 23:21:38.020 INFO Set check for repair
20398 Sep 22 23:21:38.020 INFO [0] received reconcile message
20399 Sep 22 23:21:38.020 INFO [0] All repairs completed, exit
20400 Sep 22 23:21:38.020 INFO [0] Starts cmd_loop
20401 Sep 22 23:21:38.021 INFO [1] received reconcile message
20402 Sep 22 23:21:38.021 INFO [1] All repairs completed, exit
20403 Sep 22 23:21:38.021 INFO Current flush_numbers [0..12]: [0, 0]
20404 Sep 22 23:21:38.021 INFO [1] Starts cmd_loop
20405 Sep 22 23:21:38.021 INFO [2] received reconcile message
20406 Sep 22 23:21:38.021 INFO [2] All repairs completed, exit
20407 Sep 22 23:21:38.021 INFO [2] Starts cmd_loop
20408 The guest has finished waiting for activation
20409 Sep 22 23:21:38.021 INFO Downstairs has completed Negotiation, task: proc
20410 Sep 22 23:21:38.021 INFO current number of open files limit 65536 is already the maximum
20411 Sep 22 23:21:38.021 INFO Current flush_numbers [0..12]: [0, 0]
20412 Sep 22 23:21:38.021 INFO Created new region file "/tmp/downstairs-wXyx2JF2/region.json"
20413 Sep 22 23:21:38.021 INFO Downstairs has completed Negotiation, task: proc
20414 Sep 22 23:21:38.021 INFO [0] 57a23afd-0f7e-4863-905a-9c435a295364 (465019ca-32d3-499c-bb90-0f856bcdf646) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20415 Sep 22 23:21:38.021 INFO [0] Transition from WaitActive to WaitQuorum
20416 Sep 22 23:21:38.021 WARN [0] new RM replaced this: None
20417 Sep 22 23:21:38.021 INFO [0] Starts reconcile loop
20418 Sep 22 23:21:38.021 INFO [1] 57a23afd-0f7e-4863-905a-9c435a295364 (465019ca-32d3-499c-bb90-0f856bcdf646) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20419 Sep 22 23:21:38.021 INFO [1] Transition from WaitActive to WaitQuorum
20420 Sep 22 23:21:38.021 WARN [1] new RM replaced this: None
20421 Sep 22 23:21:38.021 INFO [1] Starts reconcile loop
20422 Sep 22 23:21:38.021 INFO [2] 57a23afd-0f7e-4863-905a-9c435a295364 (465019ca-32d3-499c-bb90-0f856bcdf646) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20423 Sep 22 23:21:38.021 INFO [2] Transition from WaitActive to WaitQuorum
20424 Sep 22 23:21:38.021 WARN [2] new RM replaced this: None
20425 Sep 22 23:21:38.021 INFO [2] Starts reconcile loop
20426 Sep 22 23:21:38.021 INFO [0] 127.0.0.1:62409 task reports connection:true
20427 Sep 22 23:21:38.021 INFO 57a23afd-0f7e-4863-905a-9c435a295364 WaitQuorum WaitQuorum WaitQuorum
20428 Sep 22 23:21:38.022 INFO [0]R flush_numbers: [0, 0]
20429 Sep 22 23:21:38.022 INFO [0]R generation: [0, 0]
20430 Sep 22 23:21:38.022 INFO [0]R dirty: [false, false]
20431 Sep 22 23:21:38.022 INFO [1]R flush_numbers: [0, 0]
20432 Sep 22 23:21:38.022 INFO [1]R generation: [0, 0]
20433 Sep 22 23:21:38.022 INFO [1]R dirty: [false, false]
20434 Sep 22 23:21:38.022 INFO [2]R flush_numbers: [0, 0]
20435 Sep 22 23:21:38.022 INFO [2]R generation: [0, 0]
20436 Sep 22 23:21:38.022 INFO [2]R dirty: [false, false]
20437 Sep 22 23:21:38.022 INFO Max found gen is 1
20438 Sep 22 23:21:38.022 INFO Generation requested: 1 >= found:1
20439 Sep 22 23:21:38.022 INFO Next flush: 1
20440 Sep 22 23:21:38.022 INFO All extents match
20441 Sep 22 23:21:38.022 INFO No downstairs repair required
20442 Sep 22 23:21:38.022 INFO No initial repair work was required
20443 Sep 22 23:21:38.022 INFO Set Downstairs and Upstairs active
20444 Sep 22 23:21:38.022 INFO 57a23afd-0f7e-4863-905a-9c435a295364 is now active with session: 465019ca-32d3-499c-bb90-0f856bcdf646
20445 Sep 22 23:21:38.022 INFO 57a23afd-0f7e-4863-905a-9c435a295364 Set Active after no repair
20446 Sep 22 23:21:38.022 INFO Notify all downstairs, region set compare is done.
20447 Sep 22 23:21:38.022 INFO Set check for repair
20448 Sep 22 23:21:38.022 INFO [1] 127.0.0.1:58576 task reports connection:true
20449 Sep 22 23:21:38.022 INFO 57a23afd-0f7e-4863-905a-9c435a295364 Active Active Active
20450 Sep 22 23:21:38.022 INFO Set check for repair
20451 Sep 22 23:21:38.022 INFO [2] 127.0.0.1:55350 task reports connection:true
20452 Sep 22 23:21:38.022 INFO 57a23afd-0f7e-4863-905a-9c435a295364 Active Active Active
20453 Sep 22 23:21:38.022 INFO Set check for repair
20454 Sep 22 23:21:38.022 INFO [0] received reconcile message
20455 Sep 22 23:21:38.022 INFO [0] All repairs completed, exit
20456 Sep 22 23:21:38.022 INFO [0] Starts cmd_loop
20457 Sep 22 23:21:38.022 INFO [1] received reconcile message
20458 Sep 22 23:21:38.022 INFO [1] All repairs completed, exit
20459 Sep 22 23:21:38.022 INFO [1] Starts cmd_loop
20460 Sep 22 23:21:38.022 INFO [2] received reconcile message
20461 Sep 22 23:21:38.022 INFO [2] All repairs completed, exit
20462 Sep 22 23:21:38.022 INFO [2] Starts cmd_loop
20463 Sep 22 23:21:38.022 INFO current number of open files limit 65536 is already the maximum
20464 Sep 22 23:21:38.022 INFO Opened existing region file "/tmp/downstairs-sqEOUB0C/region.json"
20465 Sep 22 23:21:38.022 INFO Database read version 1
20466 The guest has finished waiting for activation
20467 Sep 22 23:21:38.022 INFO Database write version 1
20468 Sep 22 23:21:38.023 DEBG IO Write 1000 has deps []
20469 Sep 22 23:21:38.023 DEBG up_ds_listen was notified
20470 Sep 22 23:21:38.023 DEBG up_ds_listen process 1000
20471 Sep 22 23:21:38.023 DEBG [A] ack job 1000:1, : downstairs
20472 Sep 22 23:21:38.023 DEBG up_ds_listen checked 1 jobs, back to waiting
20473 Sep 22 23:21:38.024 DEBG Write :1000 deps:[] res:true
20474 Sep 22 23:21:38.024 DEBG Write :1000 deps:[] res:true
20475 Sep 22 23:21:38.025 DEBG Write :1000 deps:[] res:true
20476 Sep 22 23:21:38.025 DEBG IO Write 1000 has deps []
20477 Sep 22 23:21:38.025 DEBG up_ds_listen was notified
20478 Sep 22 23:21:38.025 DEBG up_ds_listen process 1000
20479 Sep 22 23:21:38.025 DEBG [A] ack job 1000:1, : downstairs
20480 Sep 22 23:21:38.025 DEBG up_ds_listen checked 1 jobs, back to waiting
20481 Sep 22 23:21:38.025 INFO UUID: 86a54504-22a8-4d69-94ec-02ec4287a819
20482 Sep 22 23:21:38.025 INFO current number of open files limit 65536 is already the maximum
20483 Sep 22 23:21:38.026 INFO Opened existing region file "/tmp/downstairs-wXyx2JF2/region.json"
20484 Sep 22 23:21:38.026 INFO Blocks per extent:5 Total Extents: 2
20485 Sep 22 23:21:38.026 INFO Database read version 1
20486 Sep 22 23:21:38.026 INFO Database write version 1
20487 Sep 22 23:21:38.026 INFO Crucible Version: Crucible Version: 0.0.1
20488 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20489 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20490 rustc: 1.70.0 stable x86_64-unknown-illumos
20491 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20492 Sep 22 23:21:38.026 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20493 Sep 22 23:21:38.026 INFO Using address: 127.0.0.1:56772, task: main
20494 Sep 22 23:21:38.026 INFO Repair listens on 127.0.0.1:0, task: repair
20495 Sep 22 23:21:38.026 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64690, task: repair
20496 Sep 22 23:21:38.026 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64690, task: repair
20497 Sep 22 23:21:38.026 INFO listening, local_addr: 127.0.0.1:64690, task: repair
20498 Sep 22 23:21:38.026 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64690, task: repair
20499 Sep 22 23:21:38.026 INFO Using repair address: 127.0.0.1:64690, task: main
20500 Sep 22 23:21:38.026 INFO No SSL acceptor configured, task: main
20501 Sep 22 23:21:38.027 DEBG Write :1000 deps:[] res:true
20502 Sep 22 23:21:38.027 INFO current number of open files limit 65536 is already the maximum
20503 Sep 22 23:21:38.027 INFO Created new region file "/tmp/downstairs-jWkf3rfF/region.json"
20504 Sep 22 23:21:38.027 DEBG Write :1000 deps:[] res:true
20505 Sep 22 23:21:38.027 INFO UUID: f4ab8931-de6c-4224-afb4-a8e9cc1a1136
20506 Sep 22 23:21:38.027 INFO Blocks per extent:5 Total Extents: 2
20507 Sep 22 23:21:38.027 INFO Crucible Version: Crucible Version: 0.0.1
20508 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20509 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20510 rustc: 1.70.0 stable x86_64-unknown-illumos
20511 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20512 Sep 22 23:21:38.027 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20513 Sep 22 23:21:38.027 INFO Using address: 127.0.0.1:55826, task: main
20514 Sep 22 23:21:38.028 DEBG Write :1000 deps:[] res:true
20515 Sep 22 23:21:38.028 INFO Repair listens on 127.0.0.1:0, task: repair
20516 Sep 22 23:21:38.028 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63059, task: repair
20517 Sep 22 23:21:38.028 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63059, task: repair
20518 Sep 22 23:21:38.028 INFO listening, local_addr: 127.0.0.1:63059, task: repair
20519 Sep 22 23:21:38.028 DEBG IO Read 1001 has deps [JobId(1000)]
20520 Sep 22 23:21:38.028 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63059, task: repair
20521 Sep 22 23:21:38.028 INFO Using repair address: 127.0.0.1:63059, task: main
20522 Sep 22 23:21:38.028 INFO No SSL acceptor configured, task: main
20523 Sep 22 23:21:38.029 INFO listening on 127.0.0.1:0, task: main
20524 Sep 22 23:21:38.029 DEBG Read :1001 deps:[JobId(1000)] res:true
20525 Sep 22 23:21:38.029 WARN 7aa67697-0340-40b6-98b6-fb2ed6103ed6 request to replace downstairs 127.0.0.1:64852 with 127.0.0.1:55826
20526 Sep 22 23:21:38.029 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 found old target: 127.0.0.1:64852 at 0
20527 Sep 22 23:21:38.029 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 replacing old: 127.0.0.1:64852 at 0
20528 Sep 22 23:21:38.029 INFO [0] client skip 0 in process jobs because fault, : downstairs
20529 Sep 22 23:21:38.029 INFO [0] changed 0 jobs to fault skipped, : downstairs
20530 Sep 22 23:21:38.029 INFO [0] 7aa67697-0340-40b6-98b6-fb2ed6103ed6 (9ec08283-1762-4efd-adf7-8c5fe20cabd0) Active Active Active ds_transition to Replacing
20531 Sep 22 23:21:38.029 INFO [0] Transition from Active to Replacing
20532 Sep 22 23:21:38.029 WARN 7aa67697-0340-40b6-98b6-fb2ed6103ed6 request to replace downstairs 127.0.0.1:64852 with 127.0.0.1:55826
20533 Sep 22 23:21:38.029 DEBG Read :1001 deps:[JobId(1000)] res:true
20534 Sep 22 23:21:38.029 INFO 7aa67697-0340-40b6-98b6-fb2ed6103ed6 found new target: 127.0.0.1:55826 at 0
20535 Sep 22 23:21:38.029 DEBG Read :1001 deps:[JobId(1000)] res:true
20536 Sep 22 23:21:38.030 INFO current number of open files limit 65536 is already the maximum
20537 Sep 22 23:21:38.030 INFO Opened existing region file "/tmp/downstairs-jWkf3rfF/region.json"
20538 Sep 22 23:21:38.030 INFO Database read version 1
20539 Sep 22 23:21:38.030 INFO Database write version 1
20540 Sep 22 23:21:38.030 DEBG [0] Read AckReady 1001, : downstairs
20541 Sep 22 23:21:38.030 DEBG [1] Read already AckReady 1001, : downstairs
20542 Sep 22 23:21:38.031 DEBG [2] Read already AckReady 1001, : downstairs
20543 Sep 22 23:21:38.031 DEBG up_ds_listen was notified
20544 Sep 22 23:21:38.031 DEBG up_ds_listen process 1001
20545 Sep 22 23:21:38.031 DEBG [A] ack job 1001:2, : downstairs
20546 Sep 22 23:21:38.031 DEBG up_ds_listen checked 1 jobs, back to waiting
20547 Sep 22 23:21:38.032 DEBG IO Read 1001 has deps [JobId(1000)]
20548 Sep 22 23:21:38.032 INFO UUID: f98632f4-9734-43e6-b572-d9debb4ad27f
20549 Sep 22 23:21:38.032 INFO Blocks per extent:5 Total Extents: 2
20550 Sep 22 23:21:38.032 INFO Crucible Version: Crucible Version: 0.0.1
20551 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20552 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20553 rustc: 1.70.0 stable x86_64-unknown-illumos
20554 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20555 Sep 22 23:21:38.032 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20556 Sep 22 23:21:38.032 INFO Using address: 127.0.0.1:62775, task: main
20557 Sep 22 23:21:38.032 DEBG Read :1001 deps:[JobId(1000)] res:true
20558 Sep 22 23:21:38.033 INFO Repair listens on 127.0.0.1:0, task: repair
20559 Sep 22 23:21:38.033 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50255, task: repair
20560 Sep 22 23:21:38.033 DEBG Read :1001 deps:[JobId(1000)] res:true
20561 Sep 22 23:21:38.033 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50255, task: repair
20562 Sep 22 23:21:38.033 INFO listening, local_addr: 127.0.0.1:50255, task: repair
20563 Sep 22 23:21:38.033 DEBG Read :1001 deps:[JobId(1000)] res:true
20564 Sep 22 23:21:38.033 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50255, task: repair
20565 Sep 22 23:21:38.033 INFO Using repair address: 127.0.0.1:50255, task: main
20566 Sep 22 23:21:38.033 INFO No SSL acceptor configured, task: main
20567 Sep 22 23:21:38.034 DEBG [0] Read AckReady 1001, : downstairs
20568 Sep 22 23:21:38.034 DEBG [1] Read already AckReady 1001, : downstairs
20569 test test::integration_test_volume_twice_replace_downstairs ... ok
20570 Sep 22 23:21:38.034 INFO Upstairs starts
20571 Sep 22 23:21:38.034 INFO Crucible Version: BuildInfo {
20572 version: "0.0.1",
20573 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20574 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20575 git_branch: "main",
20576 rustc_semver: "1.70.0",
20577 rustc_channel: "stable",
20578 rustc_host_triple: "x86_64-unknown-illumos",
20579 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20580 cargo_triple: "x86_64-unknown-illumos",
20581 debug: true,
20582 opt_level: 0,
20583 }
20584 Sep 22 23:21:38.034 INFO Upstairs <-> Downstairs Message Version: 4
20585 Sep 22 23:21:38.034 INFO Crucible stats registered with UUID: 252f3746-7497-4f5d-930d-d4a80969a0ed
20586 Sep 22 23:21:38.034 INFO Crucible 252f3746-7497-4f5d-930d-d4a80969a0ed has session id: 4e25679f-917a-45ca-9f0e-f27058713d27
20587 Sep 22 23:21:38.034 INFO current number of open files limit 65536 is already the maximum
20588 Sep 22 23:21:38.034 INFO Created new region file "/tmp/downstairs-rsuZzCsU/region.json"
20589 Sep 22 23:21:38.034 INFO listening on 127.0.0.1:0, task: main
20590 Sep 22 23:21:38.034 DEBG [2] Read already AckReady 1001, : downstairs
20591 Sep 22 23:21:38.035 INFO listening on 127.0.0.1:0, task: main
20592 Sep 22 23:21:38.035 DEBG up_ds_listen was notified
20593 Sep 22 23:21:38.035 DEBG up_ds_listen process 1001
20594 Sep 22 23:21:38.035 INFO listening on 127.0.0.1:0, task: main
20595 Sep 22 23:21:38.035 DEBG [A] ack job 1001:2, : downstairs
20596 Sep 22 23:21:38.035 INFO listening on 127.0.0.1:0, task: main
20597 Sep 22 23:21:38.035 INFO listening on 127.0.0.1:0, task: main
20598 Sep 22 23:21:38.035 INFO listening on 127.0.0.1:0, task: main
20599 Sep 22 23:21:38.035 DEBG up_ds_listen checked 1 jobs, back to waiting
20600 Sep 22 23:21:38.035 INFO [0] connecting to 127.0.0.1:49721, looper: 0
20601 Sep 22 23:21:38.035 INFO [1] connecting to 127.0.0.1:49229, looper: 1
20602 Sep 22 23:21:38.035 INFO [2] connecting to 127.0.0.1:59698, looper: 2
20603 Sep 22 23:21:38.035 INFO up_listen starts, task: up_listen
20604 Sep 22 23:21:38.035 INFO Wait for all three downstairs to come online
20605 Sep 22 23:21:38.035 INFO Flush timeout: 0.5
20606 Sep 22 23:21:38.035 INFO accepted connection from 127.0.0.1:50277, task: main
20607 Sep 22 23:21:38.036 INFO accepted connection from 127.0.0.1:48202, task: main
20608 Sep 22 23:21:38.036 DEBG IO Write 1002 has deps []
20609 Sep 22 23:21:38.036 INFO accepted connection from 127.0.0.1:50028, task: main
20610 Sep 22 23:21:38.036 DEBG up_ds_listen was notified
20611 Sep 22 23:21:38.036 DEBG up_ds_listen process 1002
20612 Sep 22 23:21:38.036 DEBG [A] ack job 1002:3, : downstairs
20613 Sep 22 23:21:38.036 DEBG up_ds_listen checked 1 jobs, back to waiting
20614 Sep 22 23:21:38.036 INFO [0] 252f3746-7497-4f5d-930d-d4a80969a0ed looper connected, looper: 0
20615 Sep 22 23:21:38.036 INFO [0] Proc runs for 127.0.0.1:49721 in state New
20616 Sep 22 23:21:38.036 INFO [1] 252f3746-7497-4f5d-930d-d4a80969a0ed looper connected, looper: 1
20617 Sep 22 23:21:38.036 INFO [1] Proc runs for 127.0.0.1:49229 in state New
20618 Sep 22 23:21:38.036 INFO [2] 252f3746-7497-4f5d-930d-d4a80969a0ed looper connected, looper: 2
20619 Sep 22 23:21:38.036 INFO [2] Proc runs for 127.0.0.1:59698 in state New
20620 Sep 22 23:21:38.036 INFO Scrub check for f70a61ee-a9f0-49ce-8bae-d3843b954927
20621 Sep 22 23:21:38.036 INFO Scrub for f70a61ee-a9f0-49ce-8bae-d3843b954927 begins
20622 Sep 22 23:21:38.036 INFO Scrub with total_size:5120 block_size:512
20623 Sep 22 23:21:38.036 INFO Scrubs from block 0 to 10 in (256) 131072 size IOs pm:0
20624 Sep 22 23:21:38.036 INFO Adjust block_count to 10 at offset 0
20625 Sep 22 23:21:38.037 INFO Upstairs starts
20626 Sep 22 23:21:38.037 INFO Crucible Version: BuildInfo {
20627 version: "0.0.1",
20628 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20629 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20630 git_branch: "main",
20631 rustc_semver: "1.70.0",
20632 rustc_channel: "stable",
20633 rustc_host_triple: "x86_64-unknown-illumos",
20634 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20635 cargo_triple: "x86_64-unknown-illumos",
20636 debug: true,
20637 opt_level: 0,
20638 }
20639 Sep 22 23:21:38.037 INFO Upstairs <-> Downstairs Message Version: 4
20640 Sep 22 23:21:38.037 INFO Crucible stats registered with UUID: 723c92d6-2550-4896-aeb0-c3ba0579c262
20641 Sep 22 23:21:38.037 INFO Crucible 723c92d6-2550-4896-aeb0-c3ba0579c262 has session id: 0b68b8af-adb3-4d45-8261-82fa9b42ae96
20642 Sep 22 23:21:38.037 INFO Connection request from 252f3746-7497-4f5d-930d-d4a80969a0ed with version 4, task: proc
20643 Sep 22 23:21:38.037 INFO upstairs UpstairsConnection { upstairs_id: 252f3746-7497-4f5d-930d-d4a80969a0ed, session_id: 2f207150-16ea-4b9b-9a7d-235b5da77cb3, gen: 1 } connected, version 4, task: proc
20644 Sep 22 23:21:38.037 INFO current number of open files limit 65536 is already the maximum
20645 Sep 22 23:21:38.037 INFO Connection request from 252f3746-7497-4f5d-930d-d4a80969a0ed with version 4, task: proc
20646 Sep 22 23:21:38.037 INFO Opened existing region file "/tmp/downstairs-rsuZzCsU/region.json"
20647 Sep 22 23:21:38.037 INFO upstairs UpstairsConnection { upstairs_id: 252f3746-7497-4f5d-930d-d4a80969a0ed, session_id: 2f207150-16ea-4b9b-9a7d-235b5da77cb3, gen: 1 } connected, version 4, task: proc
20648 Sep 22 23:21:38.037 INFO Database read version 1
20649 Sep 22 23:21:38.037 INFO Database write version 1
20650 Sep 22 23:21:38.037 INFO Connection request from 252f3746-7497-4f5d-930d-d4a80969a0ed with version 4, task: proc
20651 Sep 22 23:21:38.037 INFO upstairs UpstairsConnection { upstairs_id: 252f3746-7497-4f5d-930d-d4a80969a0ed, session_id: 2f207150-16ea-4b9b-9a7d-235b5da77cb3, gen: 1 } connected, version 4, task: proc
20652 Sep 22 23:21:38.037 INFO [0] connecting to 127.0.0.1:60338, looper: 0
20653 Sep 22 23:21:38.037 INFO [1] connecting to 127.0.0.1:56772, looper: 1
20654 Sep 22 23:21:38.038 INFO [2] connecting to 127.0.0.1:62775, looper: 2
20655 Sep 22 23:21:38.038 INFO up_listen starts, task: up_listen
20656 Sep 22 23:21:38.038 INFO Wait for all three downstairs to come online
20657 Sep 22 23:21:38.038 INFO Flush timeout: 0.5
20658 Sep 22 23:21:38.038 INFO accepted connection from 127.0.0.1:48111, task: main
20659 Sep 22 23:21:38.038 INFO [0] 252f3746-7497-4f5d-930d-d4a80969a0ed (2f207150-16ea-4b9b-9a7d-235b5da77cb3) New New New ds_transition to WaitActive
20660 Sep 22 23:21:38.038 INFO [0] Transition from New to WaitActive
20661 Sep 22 23:21:38.038 DEBG Write :1002 deps:[] res:true
20662 Sep 22 23:21:38.038 INFO [1] 252f3746-7497-4f5d-930d-d4a80969a0ed (2f207150-16ea-4b9b-9a7d-235b5da77cb3) WaitActive New New ds_transition to WaitActive
20663 Sep 22 23:21:38.038 INFO [1] Transition from New to WaitActive
20664 Sep 22 23:21:38.038 INFO accepted connection from 127.0.0.1:33070, task: main
20665 Sep 22 23:21:38.038 INFO [2] 252f3746-7497-4f5d-930d-d4a80969a0ed (2f207150-16ea-4b9b-9a7d-235b5da77cb3) WaitActive WaitActive New ds_transition to WaitActive
20666 Sep 22 23:21:38.039 INFO [2] Transition from New to WaitActive
20667 Sep 22 23:21:38.039 INFO accepted connection from 127.0.0.1:56574, task: main
20668 Sep 22 23:21:38.039 INFO [1] 723c92d6-2550-4896-aeb0-c3ba0579c262 looper connected, looper: 1
20669 Sep 22 23:21:38.039 INFO [1] Proc runs for 127.0.0.1:56772 in state New
20670 Sep 22 23:21:38.039 DEBG Write :1002 deps:[] res:true
20671 Sep 22 23:21:38.039 INFO [0] 723c92d6-2550-4896-aeb0-c3ba0579c262 looper connected, looper: 0
20672 Sep 22 23:21:38.039 INFO [0] Proc runs for 127.0.0.1:60338 in state New
20673 Sep 22 23:21:38.039 INFO [2] 723c92d6-2550-4896-aeb0-c3ba0579c262 looper connected, looper: 2
20674 Sep 22 23:21:38.039 INFO [2] Proc runs for 127.0.0.1:62775 in state New
20675 Sep 22 23:21:38.039 DEBG Write :1002 deps:[] res:true
20676 Sep 22 23:21:38.039 INFO UUID: f00b2386-14a7-47f7-ad70-b49b4e8545ea
20677 Sep 22 23:21:38.039 INFO Blocks per extent:5 Total Extents: 2
20678 Sep 22 23:21:38.040 INFO Connection request from 723c92d6-2550-4896-aeb0-c3ba0579c262 with version 4, task: proc
20679 Sep 22 23:21:38.040 INFO Crucible Version: Crucible Version: 0.0.1
20680 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20681 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20682 rustc: 1.70.0 stable x86_64-unknown-illumos
20683 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20684 Sep 22 23:21:38.040 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20685 Sep 22 23:21:38.040 INFO upstairs UpstairsConnection { upstairs_id: 723c92d6-2550-4896-aeb0-c3ba0579c262, session_id: 23df1fb4-419d-47ca-b93d-564d049b0bdc, gen: 1 } connected, version 4, task: proc
20686 Sep 22 23:21:38.040 INFO Using address: 127.0.0.1:48412, task: main
20687 Sep 22 23:21:38.040 INFO Connection request from 723c92d6-2550-4896-aeb0-c3ba0579c262 with version 4, task: proc
20688 Sep 22 23:21:38.040 INFO upstairs UpstairsConnection { upstairs_id: 723c92d6-2550-4896-aeb0-c3ba0579c262, session_id: 23df1fb4-419d-47ca-b93d-564d049b0bdc, gen: 1 } connected, version 4, task: proc
20689 Sep 22 23:21:38.040 INFO Connection request from 723c92d6-2550-4896-aeb0-c3ba0579c262 with version 4, task: proc
20690 Sep 22 23:21:38.040 INFO upstairs UpstairsConnection { upstairs_id: 723c92d6-2550-4896-aeb0-c3ba0579c262, session_id: 23df1fb4-419d-47ca-b93d-564d049b0bdc, gen: 1 } connected, version 4, task: proc
20691 Sep 22 23:21:38.040 INFO Repair listens on 127.0.0.1:0, task: repair
20692 Sep 22 23:21:38.040 INFO [1] 723c92d6-2550-4896-aeb0-c3ba0579c262 (23df1fb4-419d-47ca-b93d-564d049b0bdc) New New New ds_transition to WaitActive
20693 Sep 22 23:21:38.040 INFO [1] Transition from New to WaitActive
20694 Sep 22 23:21:38.040 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60475, task: repair
20695 Sep 22 23:21:38.040 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60475, task: repair
20696 Sep 22 23:21:38.040 INFO [0] 723c92d6-2550-4896-aeb0-c3ba0579c262 (23df1fb4-419d-47ca-b93d-564d049b0bdc) New WaitActive New ds_transition to WaitActive
20697 Sep 22 23:21:38.040 INFO listening, local_addr: 127.0.0.1:60475, task: repair
20698 Sep 22 23:21:38.040 INFO [0] Transition from New to WaitActive
20699 Sep 22 23:21:38.040 INFO [2] 723c92d6-2550-4896-aeb0-c3ba0579c262 (23df1fb4-419d-47ca-b93d-564d049b0bdc) WaitActive WaitActive New ds_transition to WaitActive
20700 Sep 22 23:21:38.040 INFO [2] Transition from New to WaitActive
20701 Sep 22 23:21:38.040 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60475, task: repair
20702 Sep 22 23:21:38.040 INFO Using repair address: 127.0.0.1:60475, task: main
20703 Sep 22 23:21:38.040 INFO No SSL acceptor configured, task: main
20704 The guest has requested activation
20705 Sep 22 23:21:38.041 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed active request set
20706 Sep 22 23:21:38.041 INFO [0] received activate with gen 1
20707 Sep 22 23:21:38.041 INFO [0] client got ds_active_rx, promote! session 2f207150-16ea-4b9b-9a7d-235b5da77cb3
20708 Sep 22 23:21:38.041 INFO current number of open files limit 65536 is already the maximum
20709 Sep 22 23:21:38.041 INFO [1] received activate with gen 1
20710 Sep 22 23:21:38.041 INFO [1] client got ds_active_rx, promote! session 2f207150-16ea-4b9b-9a7d-235b5da77cb3
20711 Sep 22 23:21:38.041 INFO Created new region file "/tmp/downstairs-6LcCIRlu/region.json"
20712 Sep 22 23:21:38.041 INFO [2] received activate with gen 1
20713 Sep 22 23:21:38.041 INFO [2] client got ds_active_rx, promote! session 2f207150-16ea-4b9b-9a7d-235b5da77cb3
20714 Sep 22 23:21:38.041 INFO UpstairsConnection { upstairs_id: 252f3746-7497-4f5d-930d-d4a80969a0ed, session_id: 2f207150-16ea-4b9b-9a7d-235b5da77cb3, gen: 1 } is now active (read-write)
20715 Sep 22 23:21:38.041 INFO UpstairsConnection { upstairs_id: 252f3746-7497-4f5d-930d-d4a80969a0ed, session_id: 2f207150-16ea-4b9b-9a7d-235b5da77cb3, gen: 1 } is now active (read-write)
20716 Sep 22 23:21:38.041 INFO UpstairsConnection { upstairs_id: 252f3746-7497-4f5d-930d-d4a80969a0ed, session_id: 2f207150-16ea-4b9b-9a7d-235b5da77cb3, gen: 1 } is now active (read-write)
20717 Sep 22 23:21:38.042 INFO [0] downstairs client at 127.0.0.1:49721 has UUID 5e565886-9774-4b9d-9633-93979cb0f8ae
20718 Sep 22 23:21:38.042 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5e565886-9774-4b9d-9633-93979cb0f8ae, encrypted: true, database_read_version: 1, database_write_version: 1 }
20719 Sep 22 23:21:38.042 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed WaitActive WaitActive WaitActive
20720 Sep 22 23:21:38.042 INFO [1] downstairs client at 127.0.0.1:49229 has UUID 6c7e6774-c872-46de-b630-fdc717de2268
20721 Sep 22 23:21:38.042 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6c7e6774-c872-46de-b630-fdc717de2268, encrypted: true, database_read_version: 1, database_write_version: 1 }
20722 Sep 22 23:21:38.042 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed WaitActive WaitActive WaitActive
20723 Sep 22 23:21:38.042 INFO [2] downstairs client at 127.0.0.1:59698 has UUID 709bf894-ad93-42e1-b7a3-127d2f1f8b04
20724 Sep 22 23:21:38.042 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 709bf894-ad93-42e1-b7a3-127d2f1f8b04, encrypted: true, database_read_version: 1, database_write_version: 1 }
20725 Sep 22 23:21:38.042 DEBG IO Write 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
20726 Sep 22 23:21:38.042 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed WaitActive WaitActive WaitActive
20727 Sep 22 23:21:38.042 INFO Current flush_numbers [0..12]: [0, 0]
20728 Sep 22 23:21:38.043 INFO Downstairs has completed Negotiation, task: proc
20729 Sep 22 23:21:38.043 INFO Current flush_numbers [0..12]: [0, 0]
20730 Sep 22 23:21:38.043 INFO Downstairs has completed Negotiation, task: proc
20731 Sep 22 23:21:38.044 INFO Current flush_numbers [0..12]: [0, 0]
20732 Sep 22 23:21:38.044 INFO Downstairs has completed Negotiation, task: proc
20733 Sep 22 23:21:38.044 INFO [0] 252f3746-7497-4f5d-930d-d4a80969a0ed (2f207150-16ea-4b9b-9a7d-235b5da77cb3) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20734 Sep 22 23:21:38.044 INFO [0] Transition from WaitActive to WaitQuorum
20735 Sep 22 23:21:38.044 WARN [0] new RM replaced this: None
20736 Sep 22 23:21:38.044 INFO [0] Starts reconcile loop
20737 Sep 22 23:21:38.044 INFO [1] 252f3746-7497-4f5d-930d-d4a80969a0ed (2f207150-16ea-4b9b-9a7d-235b5da77cb3) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20738 Sep 22 23:21:38.044 INFO [1] Transition from WaitActive to WaitQuorum
20739 Sep 22 23:21:38.044 WARN [1] new RM replaced this: None
20740 Sep 22 23:21:38.044 INFO [1] Starts reconcile loop
20741 Sep 22 23:21:38.044 INFO [2] 252f3746-7497-4f5d-930d-d4a80969a0ed (2f207150-16ea-4b9b-9a7d-235b5da77cb3) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20742 Sep 22 23:21:38.044 INFO [2] Transition from WaitActive to WaitQuorum
20743 Sep 22 23:21:38.044 WARN [2] new RM replaced this: None
20744 Sep 22 23:21:38.044 INFO [2] Starts reconcile loop
20745 Sep 22 23:21:38.044 INFO [0] 127.0.0.1:49721 task reports connection:true
20746 Sep 22 23:21:38.044 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed WaitQuorum WaitQuorum WaitQuorum
20747 Sep 22 23:21:38.044 INFO [0]R flush_numbers: [0, 0]
20748 Sep 22 23:21:38.044 INFO [0]R generation: [0, 0]
20749 Sep 22 23:21:38.044 INFO [0]R dirty: [false, false]
20750 Sep 22 23:21:38.045 INFO [1]R flush_numbers: [0, 0]
20751 Sep 22 23:21:38.045 INFO [1]R generation: [0, 0]
20752 Sep 22 23:21:38.045 INFO [1]R dirty: [false, false]
20753 Sep 22 23:21:38.045 INFO [2]R flush_numbers: [0, 0]
20754 Sep 22 23:21:38.045 INFO current number of open files limit 65536 is already the maximum
20755 Sep 22 23:21:38.045 INFO [2]R generation: [0, 0]
20756 Sep 22 23:21:38.045 INFO [2]R dirty: [false, false]
20757 Sep 22 23:21:38.045 INFO Opened existing region file "/tmp/downstairs-6LcCIRlu/region.json"
20758 Sep 22 23:21:38.045 INFO Max found gen is 1
20759 Sep 22 23:21:38.045 INFO Database read version 1
20760 Sep 22 23:21:38.045 INFO Generation requested: 1 >= found:1
20761 Sep 22 23:21:38.045 INFO Database write version 1
20762 Sep 22 23:21:38.045 INFO Next flush: 1
20763 Sep 22 23:21:38.045 INFO All extents match
20764 Sep 22 23:21:38.045 INFO No downstairs repair required
20765 Sep 22 23:21:38.045 INFO No initial repair work was required
20766 Sep 22 23:21:38.045 INFO Set Downstairs and Upstairs active
20767 Sep 22 23:21:38.045 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed is now active with session: 2f207150-16ea-4b9b-9a7d-235b5da77cb3
20768 Sep 22 23:21:38.045 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed Set Active after no repair
20769 Sep 22 23:21:38.045 INFO Notify all downstairs, region set compare is done.
20770 Sep 22 23:21:38.045 INFO Set check for repair
20771 Sep 22 23:21:38.045 INFO [1] 127.0.0.1:49229 task reports connection:true
20772 Sep 22 23:21:38.045 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed Active Active Active
20773 Sep 22 23:21:38.045 INFO Set check for repair
20774 Sep 22 23:21:38.045 INFO [2] 127.0.0.1:59698 task reports connection:true
20775 Sep 22 23:21:38.045 INFO 252f3746-7497-4f5d-930d-d4a80969a0ed Active Active Active
20776 Sep 22 23:21:38.045 INFO Set check for repair
20777 Sep 22 23:21:38.045 INFO [0] received reconcile message
20778 Sep 22 23:21:38.045 INFO [0] All repairs completed, exit
20779 Sep 22 23:21:38.045 INFO [0] Starts cmd_loop
20780 Sep 22 23:21:38.045 INFO [1] received reconcile message
20781 Sep 22 23:21:38.045 INFO [1] All repairs completed, exit
20782 Sep 22 23:21:38.045 INFO [1] Starts cmd_loop
20783 Sep 22 23:21:38.045 INFO [2] received reconcile message
20784 Sep 22 23:21:38.045 INFO [2] All repairs completed, exit
20785 Sep 22 23:21:38.045 INFO [2] Starts cmd_loop
20786 The guest has finished waiting for activation
20787 The guest has requested activation
20788 Sep 22 23:21:38.046 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 active request set
20789 Sep 22 23:21:38.046 INFO [0] received activate with gen 1
20790 Sep 22 23:21:38.046 INFO [0] client got ds_active_rx, promote! session 23df1fb4-419d-47ca-b93d-564d049b0bdc
20791 Sep 22 23:21:38.046 INFO [1] received activate with gen 1
20792 Sep 22 23:21:38.046 INFO [1] client got ds_active_rx, promote! session 23df1fb4-419d-47ca-b93d-564d049b0bdc
20793 Sep 22 23:21:38.046 INFO [2] received activate with gen 1
20794 Sep 22 23:21:38.046 INFO [2] client got ds_active_rx, promote! session 23df1fb4-419d-47ca-b93d-564d049b0bdc
20795 Sep 22 23:21:38.046 DEBG up_ds_listen was notified
20796 Sep 22 23:21:38.046 DEBG up_ds_listen process 1003
20797 Sep 22 23:21:38.046 DEBG [A] ack job 1003:4, : downstairs
20798 Sep 22 23:21:38.046 DEBG up_ds_listen checked 1 jobs, back to waiting
20799 Sep 22 23:21:38.046 INFO UpstairsConnection { upstairs_id: 723c92d6-2550-4896-aeb0-c3ba0579c262, session_id: 23df1fb4-419d-47ca-b93d-564d049b0bdc, gen: 1 } is now active (read-write)
20800 Sep 22 23:21:38.046 INFO Scrub at offset 10/10 sp:10
20801 Sep 22 23:21:38.046 INFO UpstairsConnection { upstairs_id: 723c92d6-2550-4896-aeb0-c3ba0579c262, session_id: 23df1fb4-419d-47ca-b93d-564d049b0bdc, gen: 1 } is now active (read-write)
20802 Sep 22 23:21:38.047 INFO UpstairsConnection { upstairs_id: 723c92d6-2550-4896-aeb0-c3ba0579c262, session_id: 23df1fb4-419d-47ca-b93d-564d049b0bdc, gen: 1 } is now active (read-write)
20803 Sep 22 23:21:38.047 INFO UUID: c47df1e6-d581-419f-94ff-178a0e67a3ed
20804 Sep 22 23:21:38.047 INFO Blocks per extent:5 Total Extents: 2
20805 Sep 22 23:21:38.047 INFO Crucible Version: Crucible Version: 0.0.1
20806 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20807 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20808 rustc: 1.70.0 stable x86_64-unknown-illumos
20809 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20810 Sep 22 23:21:38.047 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20811 Sep 22 23:21:38.047 INFO Using address: 127.0.0.1:41943, task: main
20812 Sep 22 23:21:38.047 INFO [1] downstairs client at 127.0.0.1:56772 has UUID 86a54504-22a8-4d69-94ec-02ec4287a819
20813 Sep 22 23:21:38.047 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 86a54504-22a8-4d69-94ec-02ec4287a819, encrypted: true, database_read_version: 1, database_write_version: 1 }
20814 Sep 22 23:21:38.047 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 WaitActive WaitActive WaitActive
20815 Sep 22 23:21:38.047 INFO [0] downstairs client at 127.0.0.1:60338 has UUID 137bc727-67de-4ee4-a302-cef52368a28d
20816 Sep 22 23:21:38.047 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 137bc727-67de-4ee4-a302-cef52368a28d, encrypted: true, database_read_version: 1, database_write_version: 1 }
20817 Sep 22 23:21:38.047 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 WaitActive WaitActive WaitActive
20818 Sep 22 23:21:38.047 INFO Repair listens on 127.0.0.1:0, task: repair
20819 Sep 22 23:21:38.047 INFO [2] downstairs client at 127.0.0.1:62775 has UUID f98632f4-9734-43e6-b572-d9debb4ad27f
20820 Sep 22 23:21:38.047 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f98632f4-9734-43e6-b572-d9debb4ad27f, encrypted: true, database_read_version: 1, database_write_version: 1 }
20821 Sep 22 23:21:38.047 INFO Scrub f70a61ee-a9f0-49ce-8bae-d3843b954927 done in 0 seconds. Retries:0 scrub_size:5120 size:10 pause_milli:0
20822 Sep 22 23:21:38.048 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 WaitActive WaitActive WaitActive
20823 Sep 22 23:21:38.047 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:34906, task: repair
20824 Sep 22 23:21:38.048 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:34906, task: repair
20825 Sep 22 23:21:38.048 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001), JobId(1000)]
20826 Sep 22 23:21:38.048 INFO listening, local_addr: 127.0.0.1:34906, task: repair
20827 Sep 22 23:21:38.048 INFO Current flush_numbers [0..12]: [0, 0]
20828 Sep 22 23:21:38.048 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:34906, task: repair
20829 Sep 22 23:21:38.048 INFO Using repair address: 127.0.0.1:34906, task: main
20830 Sep 22 23:21:38.048 INFO No SSL acceptor configured, task: main
20831 Sep 22 23:21:38.048 INFO Downstairs has completed Negotiation, task: proc
20832 Sep 22 23:21:38.048 INFO Current flush_numbers [0..12]: [0, 0]
20833 Sep 22 23:21:38.048 INFO current number of open files limit 65536 is already the maximum
20834 Sep 22 23:21:38.048 INFO Created new region file "/tmp/downstairs-lwXKwwR7/region.json"
20835 Sep 22 23:21:38.048 INFO Downstairs has completed Negotiation, task: proc
20836 Sep 22 23:21:38.049 INFO Current flush_numbers [0..12]: [0, 0]
20837 Sep 22 23:21:38.049 INFO Downstairs has completed Negotiation, task: proc
20838 Sep 22 23:21:38.049 INFO [1] 723c92d6-2550-4896-aeb0-c3ba0579c262 (23df1fb4-419d-47ca-b93d-564d049b0bdc) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20839 Sep 22 23:21:38.049 INFO [1] Transition from WaitActive to WaitQuorum
20840 Sep 22 23:21:38.049 WARN [1] new RM replaced this: None
20841 Sep 22 23:21:38.049 INFO [1] Starts reconcile loop
20842 Sep 22 23:21:38.049 INFO [0] 723c92d6-2550-4896-aeb0-c3ba0579c262 (23df1fb4-419d-47ca-b93d-564d049b0bdc) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
20843 Sep 22 23:21:38.049 INFO [0] Transition from WaitActive to WaitQuorum
20844 Sep 22 23:21:38.049 WARN [0] new RM replaced this: None
20845 Sep 22 23:21:38.049 INFO [0] Starts reconcile loop
20846 Sep 22 23:21:38.049 INFO [2] 723c92d6-2550-4896-aeb0-c3ba0579c262 (23df1fb4-419d-47ca-b93d-564d049b0bdc) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20847 Sep 22 23:21:38.049 INFO [2] Transition from WaitActive to WaitQuorum
20848 Sep 22 23:21:38.049 WARN [2] new RM replaced this: None
20849 Sep 22 23:21:38.049 INFO [2] Starts reconcile loop
20850 Sep 22 23:21:38.049 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
20851 Sep 22 23:21:38.049 INFO [1] 127.0.0.1:56772 task reports connection:true
20852 Sep 22 23:21:38.049 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 WaitQuorum WaitQuorum WaitQuorum
20853 Sep 22 23:21:38.049 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
20854 Sep 22 23:21:38.049 INFO [0]R flush_numbers: [0, 0]
20855 Sep 22 23:21:38.049 INFO [0]R generation: [0, 0]
20856 Sep 22 23:21:38.049 INFO [0]R dirty: [false, false]
20857 Sep 22 23:21:38.049 INFO [1]R flush_numbers: [0, 0]
20858 Sep 22 23:21:38.049 INFO [1]R generation: [0, 0]
20859 Sep 22 23:21:38.050 INFO [1]R dirty: [false, false]
20860 Sep 22 23:21:38.050 INFO [2]R flush_numbers: [0, 0]
20861 Sep 22 23:21:38.050 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
20862 Sep 22 23:21:38.050 INFO [2]R generation: [0, 0]
20863 Sep 22 23:21:38.050 INFO [2]R dirty: [false, false]
20864 Sep 22 23:21:38.050 INFO Max found gen is 1
20865 Sep 22 23:21:38.050 INFO Generation requested: 1 >= found:1
20866 Sep 22 23:21:38.050 INFO Next flush: 1
20867 Sep 22 23:21:38.050 INFO All extents match
20868 Sep 22 23:21:38.050 INFO No downstairs repair required
20869 Sep 22 23:21:38.050 INFO No initial repair work was required
20870 Sep 22 23:21:38.050 INFO Set Downstairs and Upstairs active
20871 Sep 22 23:21:38.050 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 is now active with session: 23df1fb4-419d-47ca-b93d-564d049b0bdc
20872 Sep 22 23:21:38.050 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 Set Active after no repair
20873 Sep 22 23:21:38.050 INFO Notify all downstairs, region set compare is done.
20874 Sep 22 23:21:38.050 INFO Set check for repair
20875 Sep 22 23:21:38.050 DEBG up_ds_listen was notified
20876 Sep 22 23:21:38.050 INFO [0] 127.0.0.1:60338 task reports connection:true
20877 Sep 22 23:21:38.050 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 Active Active Active
20878 Sep 22 23:21:38.050 DEBG up_ds_listen process 1004
20879 Sep 22 23:21:38.050 INFO Set check for repair
20880 Sep 22 23:21:38.050 DEBG [A] ack job 1004:5, : downstairs
20881 Sep 22 23:21:38.050 INFO [2] 127.0.0.1:62775 task reports connection:true
20882 Sep 22 23:21:38.050 INFO 723c92d6-2550-4896-aeb0-c3ba0579c262 Active Active Active
20883 Sep 22 23:21:38.050 INFO Set check for repair
20884 Sep 22 23:21:38.050 DEBG [rc] retire 1004 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)], : downstairs
20885 Sep 22 23:21:38.050 DEBG up_ds_listen checked 1 jobs, back to waiting
20886 Sep 22 23:21:38.050 INFO [0] received reconcile message
20887 Sep 22 23:21:38.050 INFO [0] All repairs completed, exit
20888 Sep 22 23:21:38.050 INFO [0] Starts cmd_loop
20889 Sep 22 23:21:38.050 INFO [1] received reconcile message
20890 Sep 22 23:21:38.050 DEBG IO Flush 1002 has deps [JobId(1001), JobId(1000)]
20891 Sep 22 23:21:38.050 INFO [1] All repairs completed, exit
20892 Sep 22 23:21:38.050 INFO [1] Starts cmd_loop
20893 Sep 22 23:21:38.050 INFO [2] received reconcile message
20894 Sep 22 23:21:38.050 INFO [2] All repairs completed, exit
20895 Sep 22 23:21:38.050 INFO [2] Starts cmd_loop
20896 The guest has finished waiting for activation
20897 Sep 22 23:21:38.051 DEBG IO Write 1000 has deps []
20898 Sep 22 23:21:38.051 DEBG up_ds_listen was notified
20899 Sep 22 23:21:38.051 DEBG up_ds_listen process 1000
20900 Sep 22 23:21:38.051 DEBG [A] ack job 1000:1, : downstairs
20901 Sep 22 23:21:38.051 DEBG up_ds_listen checked 1 jobs, back to waiting
20902 Sep 22 23:21:38.052 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
20903 Sep 22 23:21:38.052 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
20904 Sep 22 23:21:38.052 INFO current number of open files limit 65536 is already the maximum
20905 Sep 22 23:21:38.052 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
20906 Sep 22 23:21:38.052 INFO Opened existing region file "/tmp/downstairs-lwXKwwR7/region.json"
20907 Sep 22 23:21:38.052 INFO Database read version 1
20908 Sep 22 23:21:38.052 INFO Database write version 1
20909 Sep 22 23:21:38.052 DEBG up_ds_listen was notified
20910 Sep 22 23:21:38.052 DEBG up_ds_listen process 1002
20911 Sep 22 23:21:38.052 DEBG [A] ack job 1002:3, : downstairs
20912 Sep 22 23:21:38.052 DEBG [rc] retire 1002 clears [JobId(1000), JobId(1001), JobId(1002)], : downstairs
20913 Sep 22 23:21:38.052 DEBG up_ds_listen checked 1 jobs, back to waiting
20914 Sep 22 23:21:38.052 DEBG Write :1000 deps:[] res:true
20915 Sep 22 23:21:38.053 DEBG IO Read 1005 has deps []
20916 Sep 22 23:21:38.053 DEBG Write :1000 deps:[] res:true
20917 Sep 22 23:21:38.053 DEBG Write :1000 deps:[] res:true
20918 Sep 22 23:21:38.053 DEBG Read :1005 deps:[] res:true
20919 Sep 22 23:21:38.054 DEBG IO Write 1000 has deps []
20920 Sep 22 23:21:38.054 DEBG Read :1005 deps:[] res:true
20921 Sep 22 23:21:38.054 DEBG up_ds_listen was notified
20922 Sep 22 23:21:38.054 DEBG up_ds_listen process 1000
20923 Sep 22 23:21:38.054 DEBG [A] ack job 1000:1, : downstairs
20924 Sep 22 23:21:38.054 DEBG up_ds_listen checked 1 jobs, back to waiting
20925 Sep 22 23:21:38.054 INFO UUID: d170e9eb-99a7-40aa-8e4b-74aea23bd418
20926 Sep 22 23:21:38.054 INFO Blocks per extent:5 Total Extents: 2
20927 Sep 22 23:21:38.054 INFO Crucible Version: Crucible Version: 0.0.1
20928 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20929 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20930 rustc: 1.70.0 stable x86_64-unknown-illumos
20931 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20932 Sep 22 23:21:38.054 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20933 Sep 22 23:21:38.054 INFO Using address: 127.0.0.1:33214, task: main
20934 Sep 22 23:21:38.054 DEBG Read :1005 deps:[] res:true
20935 Sep 22 23:21:38.055 INFO Repair listens on 127.0.0.1:0, task: repair
20936 Sep 22 23:21:38.055 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62619, task: repair
20937 Sep 22 23:21:38.055 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62619, task: repair
20938 Sep 22 23:21:38.055 INFO listening, local_addr: 127.0.0.1:62619, task: repair
20939 Sep 22 23:21:38.055 DEBG Write :1000 deps:[] res:true
20940 Sep 22 23:21:38.055 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62619, task: repair
20941 Sep 22 23:21:38.055 INFO Using repair address: 127.0.0.1:62619, task: main
20942 Sep 22 23:21:38.055 INFO No SSL acceptor configured, task: main
20943 Sep 22 23:21:38.055 DEBG Write :1000 deps:[] res:true
20944 Sep 22 23:21:38.056 INFO Upstairs starts
20945 Sep 22 23:21:38.056 DEBG Write :1000 deps:[] res:true
20946 Sep 22 23:21:38.056 INFO Crucible Version: BuildInfo {
20947 version: "0.0.1",
20948 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20949 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20950 git_branch: "main",
20951 rustc_semver: "1.70.0",
20952 rustc_channel: "stable",
20953 rustc_host_triple: "x86_64-unknown-illumos",
20954 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20955 cargo_triple: "x86_64-unknown-illumos",
20956 debug: true,
20957 opt_level: 0,
20958 }
20959 Sep 22 23:21:38.056 INFO Upstairs <-> Downstairs Message Version: 4
20960 Sep 22 23:21:38.056 INFO Crucible stats registered with UUID: e4fe1d81-dc04-41e4-b745-9219b6982323
20961 Sep 22 23:21:38.056 INFO Crucible e4fe1d81-dc04-41e4-b745-9219b6982323 has session id: b713c0d5-2ffc-4c94-a106-91b2d080c72b
20962 Sep 22 23:21:38.056 INFO Connection request from d15c167b-c380-46a4-a953-864fc02495ed with version 4, task: proc
20963 Sep 22 23:21:38.056 INFO listening on 127.0.0.1:0, task: main
20964 Sep 22 23:21:38.056 DEBG IO Read 1001 has deps [JobId(1000)]
20965 Sep 22 23:21:38.056 INFO upstairs UpstairsConnection { upstairs_id: d15c167b-c380-46a4-a953-864fc02495ed, session_id: d8b1013b-fbba-4bb2-a01a-1a978b72bafa, gen: 1 } connected, version 4, task: proc
20966 Sep 22 23:21:38.056 INFO listening on 127.0.0.1:0, task: main
20967 Sep 22 23:21:38.056 INFO listening on 127.0.0.1:0, task: main
20968 Sep 22 23:21:38.056 INFO Connection request from d15c167b-c380-46a4-a953-864fc02495ed with version 4, task: proc
20969 Sep 22 23:21:38.056 INFO upstairs UpstairsConnection { upstairs_id: d15c167b-c380-46a4-a953-864fc02495ed, session_id: d8b1013b-fbba-4bb2-a01a-1a978b72bafa, gen: 1 } connected, version 4, task: proc
20970 Sep 22 23:21:38.056 INFO [0] connecting to 127.0.0.1:48412, looper: 0
20971 Sep 22 23:21:38.056 INFO Connection request from d15c167b-c380-46a4-a953-864fc02495ed with version 4, task: proc
20972 Sep 22 23:21:38.056 INFO upstairs UpstairsConnection { upstairs_id: d15c167b-c380-46a4-a953-864fc02495ed, session_id: d8b1013b-fbba-4bb2-a01a-1a978b72bafa, gen: 1 } connected, version 4, task: proc
20973 Sep 22 23:21:38.056 INFO [1] connecting to 127.0.0.1:41943, looper: 1
20974 Sep 22 23:21:38.056 INFO [2] connecting to 127.0.0.1:33214, looper: 2
20975 Sep 22 23:21:38.057 INFO up_listen starts, task: up_listen
20976 Sep 22 23:21:38.057 INFO Wait for all three downstairs to come online
20977 Sep 22 23:21:38.057 INFO Flush timeout: 0.5
20978 Sep 22 23:21:38.057 INFO [0] d15c167b-c380-46a4-a953-864fc02495ed (d8b1013b-fbba-4bb2-a01a-1a978b72bafa) New New New ds_transition to WaitActive
20979 Sep 22 23:21:38.057 DEBG Read :1001 deps:[JobId(1000)] res:true
20980 Sep 22 23:21:38.057 INFO [0] Transition from New to WaitActive
20981 Sep 22 23:21:38.057 INFO accepted connection from 127.0.0.1:50980, task: main
20982 Sep 22 23:21:38.057 INFO [1] d15c167b-c380-46a4-a953-864fc02495ed (d8b1013b-fbba-4bb2-a01a-1a978b72bafa) WaitActive New New ds_transition to WaitActive
20983 Sep 22 23:21:38.057 INFO [1] Transition from New to WaitActive
20984 Sep 22 23:21:38.057 DEBG Read :1001 deps:[JobId(1000)] res:true
20985 Sep 22 23:21:38.057 INFO [2] d15c167b-c380-46a4-a953-864fc02495ed (d8b1013b-fbba-4bb2-a01a-1a978b72bafa) WaitActive WaitActive New ds_transition to WaitActive
20986 Sep 22 23:21:38.057 INFO accepted connection from 127.0.0.1:60993, task: main
20987 Sep 22 23:21:38.057 INFO [2] Transition from New to WaitActive
20988 Sep 22 23:21:38.057 INFO accepted connection from 127.0.0.1:39483, task: main
20989 Sep 22 23:21:38.057 INFO [0] e4fe1d81-dc04-41e4-b745-9219b6982323 looper connected, looper: 0
20990 Sep 22 23:21:38.057 DEBG Read :1001 deps:[JobId(1000)] res:true
20991 Sep 22 23:21:38.057 INFO [0] Proc runs for 127.0.0.1:48412 in state New
20992 Sep 22 23:21:38.057 INFO [1] e4fe1d81-dc04-41e4-b745-9219b6982323 looper connected, looper: 1
20993 Sep 22 23:21:38.057 INFO [1] Proc runs for 127.0.0.1:41943 in state New
20994 Sep 22 23:21:38.057 INFO [2] e4fe1d81-dc04-41e4-b745-9219b6982323 looper connected, looper: 2
20995 Sep 22 23:21:38.057 INFO [2] Proc runs for 127.0.0.1:33214 in state New
20996 Sep 22 23:21:38.058 DEBG [0] Read AckReady 1001, : downstairs
20997 Sep 22 23:21:38.058 DEBG [1] Read already AckReady 1001, : downstairs
20998 Sep 22 23:21:38.058 INFO Connection request from e4fe1d81-dc04-41e4-b745-9219b6982323 with version 4, task: proc
20999 Sep 22 23:21:38.058 INFO upstairs UpstairsConnection { upstairs_id: e4fe1d81-dc04-41e4-b745-9219b6982323, session_id: d2b38da3-e438-4e27-b550-ef7f093a76bf, gen: 1 } connected, version 4, task: proc
21000 Sep 22 23:21:38.058 INFO Connection request from e4fe1d81-dc04-41e4-b745-9219b6982323 with version 4, task: proc
21001 Sep 22 23:21:38.058 INFO upstairs UpstairsConnection { upstairs_id: e4fe1d81-dc04-41e4-b745-9219b6982323, session_id: d2b38da3-e438-4e27-b550-ef7f093a76bf, gen: 1 } connected, version 4, task: proc
21002 Sep 22 23:21:38.058 DEBG [2] Read already AckReady 1001, : downstairs
21003 Sep 22 23:21:38.058 INFO Connection request from e4fe1d81-dc04-41e4-b745-9219b6982323 with version 4, task: proc
21004 Sep 22 23:21:38.058 DEBG up_ds_listen was notified
21005 Sep 22 23:21:38.058 DEBG up_ds_listen process 1001
21006 Sep 22 23:21:38.058 INFO upstairs UpstairsConnection { upstairs_id: e4fe1d81-dc04-41e4-b745-9219b6982323, session_id: d2b38da3-e438-4e27-b550-ef7f093a76bf, gen: 1 } connected, version 4, task: proc
21007 Sep 22 23:21:38.058 DEBG [A] ack job 1001:2, : downstairs
21008 Sep 22 23:21:38.058 DEBG up_ds_listen checked 1 jobs, back to waiting
21009 Sep 22 23:21:38.058 INFO [0] e4fe1d81-dc04-41e4-b745-9219b6982323 (d2b38da3-e438-4e27-b550-ef7f093a76bf) New New New ds_transition to WaitActive
21010 The guest has requested activation
21011 Sep 22 23:21:38.058 INFO [0] Transition from New to WaitActive
21012 Sep 22 23:21:38.058 INFO d15c167b-c380-46a4-a953-864fc02495ed active request set
21013 Sep 22 23:21:38.058 INFO [1] e4fe1d81-dc04-41e4-b745-9219b6982323 (d2b38da3-e438-4e27-b550-ef7f093a76bf) WaitActive New New ds_transition to WaitActive
21014 Sep 22 23:21:38.059 INFO [1] Transition from New to WaitActive
21015 Sep 22 23:21:38.059 INFO [0] received activate with gen 1
21016 Sep 22 23:21:38.059 DEBG IO Read 1001 has deps [JobId(1000)]
21017 Sep 22 23:21:38.059 INFO [0] client got ds_active_rx, promote! session d8b1013b-fbba-4bb2-a01a-1a978b72bafa
21018 Sep 22 23:21:38.059 INFO [2] e4fe1d81-dc04-41e4-b745-9219b6982323 (d2b38da3-e438-4e27-b550-ef7f093a76bf) WaitActive WaitActive New ds_transition to WaitActive
21019 Sep 22 23:21:38.059 INFO [1] received activate with gen 1
21020 Sep 22 23:21:38.059 INFO [2] Transition from New to WaitActive
21021 Sep 22 23:21:38.059 INFO [1] client got ds_active_rx, promote! session d8b1013b-fbba-4bb2-a01a-1a978b72bafa
21022 Sep 22 23:21:38.059 INFO [2] received activate with gen 1
21023 Sep 22 23:21:38.059 DEBG [0] Read AckReady 1005, : downstairs
21024 Sep 22 23:21:38.059 INFO [2] client got ds_active_rx, promote! session d8b1013b-fbba-4bb2-a01a-1a978b72bafa
21025 The guest has requested activation
21026 Sep 22 23:21:38.059 INFO UpstairsConnection { upstairs_id: d15c167b-c380-46a4-a953-864fc02495ed, session_id: d8b1013b-fbba-4bb2-a01a-1a978b72bafa, gen: 1 } is now active (read-write)
21027 Sep 22 23:21:38.059 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 active request set
21028 Sep 22 23:21:38.059 INFO UpstairsConnection { upstairs_id: d15c167b-c380-46a4-a953-864fc02495ed, session_id: d8b1013b-fbba-4bb2-a01a-1a978b72bafa, gen: 1 } is now active (read-write)
21029 Sep 22 23:21:38.059 INFO [0] received activate with gen 1
21030 Sep 22 23:21:38.059 INFO [0] client got ds_active_rx, promote! session d2b38da3-e438-4e27-b550-ef7f093a76bf
21031 Sep 22 23:21:38.059 INFO UpstairsConnection { upstairs_id: d15c167b-c380-46a4-a953-864fc02495ed, session_id: d8b1013b-fbba-4bb2-a01a-1a978b72bafa, gen: 1 } is now active (read-write)
21032 Sep 22 23:21:38.059 INFO [1] received activate with gen 1
21033 Sep 22 23:21:38.059 INFO [1] client got ds_active_rx, promote! session d2b38da3-e438-4e27-b550-ef7f093a76bf
21034 Sep 22 23:21:38.059 DEBG Read :1001 deps:[JobId(1000)] res:true
21035 Sep 22 23:21:38.059 INFO [2] received activate with gen 1
21036 Sep 22 23:21:38.059 INFO [2] client got ds_active_rx, promote! session d2b38da3-e438-4e27-b550-ef7f093a76bf
21037 Sep 22 23:21:38.059 DEBG Read :1001 deps:[JobId(1000)] res:true
21038 Sep 22 23:21:38.059 INFO UpstairsConnection { upstairs_id: e4fe1d81-dc04-41e4-b745-9219b6982323, session_id: d2b38da3-e438-4e27-b550-ef7f093a76bf, gen: 1 } is now active (read-write)
21039 Sep 22 23:21:38.059 INFO [0] downstairs client at 127.0.0.1:33968 has UUID d300473c-ae2a-4754-900f-58509b1b9961
21040 Sep 22 23:21:38.059 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d300473c-ae2a-4754-900f-58509b1b9961, encrypted: true, database_read_version: 1, database_write_version: 1 }
21041 Sep 22 23:21:38.059 INFO UpstairsConnection { upstairs_id: e4fe1d81-dc04-41e4-b745-9219b6982323, session_id: d2b38da3-e438-4e27-b550-ef7f093a76bf, gen: 1 } is now active (read-write)
21042 Sep 22 23:21:38.059 DEBG Read :1001 deps:[JobId(1000)] res:true
21043 Sep 22 23:21:38.059 INFO d15c167b-c380-46a4-a953-864fc02495ed WaitActive WaitActive WaitActive
21044 Sep 22 23:21:38.060 INFO [1] downstairs client at 127.0.0.1:40280 has UUID 84d77caf-cd99-479d-b329-65e270f8e6d5
21045 Sep 22 23:21:38.060 INFO UpstairsConnection { upstairs_id: e4fe1d81-dc04-41e4-b745-9219b6982323, session_id: d2b38da3-e438-4e27-b550-ef7f093a76bf, gen: 1 } is now active (read-write)
21046 Sep 22 23:21:38.060 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 84d77caf-cd99-479d-b329-65e270f8e6d5, encrypted: true, database_read_version: 1, database_write_version: 1 }
21047 Sep 22 23:21:38.060 INFO d15c167b-c380-46a4-a953-864fc02495ed WaitActive WaitActive WaitActive
21048 Sep 22 23:21:38.060 INFO [2] downstairs client at 127.0.0.1:36377 has UUID 1104ac55-f7a8-4cef-b93e-5cab8f0588d7
21049 Sep 22 23:21:38.060 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1104ac55-f7a8-4cef-b93e-5cab8f0588d7, encrypted: true, database_read_version: 1, database_write_version: 1 }
21050 Sep 22 23:21:38.060 INFO d15c167b-c380-46a4-a953-864fc02495ed WaitActive WaitActive WaitActive
21051 Sep 22 23:21:38.060 INFO Current flush_numbers [0..12]: [0, 0]
21052 Sep 22 23:21:38.060 DEBG [1] Read AckReady 1001, : downstairs
21053 Sep 22 23:21:38.060 INFO [0] downstairs client at 127.0.0.1:48412 has UUID f00b2386-14a7-47f7-ad70-b49b4e8545ea
21054 Sep 22 23:21:38.060 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f00b2386-14a7-47f7-ad70-b49b4e8545ea, encrypted: true, database_read_version: 1, database_write_version: 1 }
21055 Sep 22 23:21:38.060 INFO Downstairs has completed Negotiation, task: proc
21056 Sep 22 23:21:38.060 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 WaitActive WaitActive WaitActive
21057 Sep 22 23:21:38.060 DEBG [0] Read already AckReady 1001, : downstairs
21058 Sep 22 23:21:38.060 INFO [1] downstairs client at 127.0.0.1:41943 has UUID c47df1e6-d581-419f-94ff-178a0e67a3ed
21059 Sep 22 23:21:38.060 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c47df1e6-d581-419f-94ff-178a0e67a3ed, encrypted: true, database_read_version: 1, database_write_version: 1 }
21060 Sep 22 23:21:38.060 INFO Current flush_numbers [0..12]: [0, 0]
21061 Sep 22 23:21:38.060 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 WaitActive WaitActive WaitActive
21062 Sep 22 23:21:38.060 INFO [2] downstairs client at 127.0.0.1:33214 has UUID d170e9eb-99a7-40aa-8e4b-74aea23bd418
21063 Sep 22 23:21:38.060 INFO Downstairs has completed Negotiation, task: proc
21064 Sep 22 23:21:38.060 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d170e9eb-99a7-40aa-8e4b-74aea23bd418, encrypted: true, database_read_version: 1, database_write_version: 1 }
21065 Sep 22 23:21:38.060 DEBG [2] Read already AckReady 1001, : downstairs
21066 Sep 22 23:21:38.060 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 WaitActive WaitActive WaitActive
21067 Sep 22 23:21:38.060 DEBG up_ds_listen was notified
21068 Sep 22 23:21:38.060 DEBG up_ds_listen process 1001
21069 Sep 22 23:21:38.060 DEBG [A] ack job 1001:2, : downstairs
21070 Sep 22 23:21:38.061 DEBG up_ds_listen checked 1 jobs, back to waiting
21071 Sep 22 23:21:38.061 INFO Current flush_numbers [0..12]: [0, 0]
21072 Sep 22 23:21:38.061 INFO Current flush_numbers [0..12]: [0, 0]
21073 Sep 22 23:21:38.061 INFO Downstairs has completed Negotiation, task: proc
21074 Sep 22 23:21:38.061 INFO Downstairs has completed Negotiation, task: proc
21075 Sep 22 23:21:38.061 INFO [0] d15c167b-c380-46a4-a953-864fc02495ed (d8b1013b-fbba-4bb2-a01a-1a978b72bafa) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21076 Sep 22 23:21:38.061 INFO [0] Transition from WaitActive to WaitQuorum
21077 Sep 22 23:21:38.061 WARN [0] new RM replaced this: None
21078 Sep 22 23:21:38.061 INFO [0] Starts reconcile loop
21079 Sep 22 23:21:38.061 INFO [1] d15c167b-c380-46a4-a953-864fc02495ed (d8b1013b-fbba-4bb2-a01a-1a978b72bafa) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21080 Sep 22 23:21:38.061 INFO [1] Transition from WaitActive to WaitQuorum
21081 Sep 22 23:21:38.061 WARN [1] new RM replaced this: None
21082 Sep 22 23:21:38.061 INFO Current flush_numbers [0..12]: [0, 0]
21083 Sep 22 23:21:38.061 INFO [1] Starts reconcile loop
21084 Sep 22 23:21:38.061 INFO [2] d15c167b-c380-46a4-a953-864fc02495ed (d8b1013b-fbba-4bb2-a01a-1a978b72bafa) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
21085 Sep 22 23:21:38.061 INFO [2] Transition from WaitActive to WaitQuorum
21086 Sep 22 23:21:38.061 WARN [2] new RM replaced this: None
21087 Sep 22 23:21:38.061 INFO [2] Starts reconcile loop
21088 Sep 22 23:21:38.061 INFO [0] 127.0.0.1:33968 task reports connection:true
21089 Sep 22 23:21:38.061 INFO d15c167b-c380-46a4-a953-864fc02495ed WaitQuorum WaitQuorum WaitQuorum
21090 Sep 22 23:21:38.061 INFO Downstairs has completed Negotiation, task: proc
21091 Sep 22 23:21:38.061 INFO [0]R flush_numbers: [0, 0]
21092 Sep 22 23:21:38.061 INFO [0]R generation: [0, 0]
21093 Sep 22 23:21:38.061 INFO [0]R dirty: [false, false]
21094 Sep 22 23:21:38.061 DEBG IO Write 1002 has deps []
21095 Sep 22 23:21:38.061 INFO [1]R flush_numbers: [0, 0]
21096 Sep 22 23:21:38.061 INFO [1]R generation: [0, 0]
21097 Sep 22 23:21:38.061 INFO [1]R dirty: [false, false]
21098 Sep 22 23:21:38.061 INFO [2]R flush_numbers: [0, 0]
21099 Sep 22 23:21:38.061 INFO [2]R generation: [0, 0]
21100 Sep 22 23:21:38.061 INFO [2]R dirty: [false, false]
21101 Sep 22 23:21:38.061 INFO Max found gen is 1
21102 Sep 22 23:21:38.061 INFO Generation requested: 1 >= found:1
21103 Sep 22 23:21:38.061 DEBG up_ds_listen was notified
21104 Sep 22 23:21:38.061 INFO Next flush: 1
21105 Sep 22 23:21:38.061 DEBG [1] Read already AckReady 1005, : downstairs
21106 Sep 22 23:21:38.061 DEBG up_ds_listen process 1002
21107 Sep 22 23:21:38.061 INFO All extents match
21108 Sep 22 23:21:38.061 INFO No downstairs repair required
21109 Sep 22 23:21:38.061 DEBG [A] ack job 1002:3, : downstairs
21110 Sep 22 23:21:38.061 INFO No initial repair work was required
21111 Sep 22 23:21:38.061 INFO Set Downstairs and Upstairs active
21112 Sep 22 23:21:38.061 INFO d15c167b-c380-46a4-a953-864fc02495ed is now active with session: d8b1013b-fbba-4bb2-a01a-1a978b72bafa
21113 Sep 22 23:21:38.061 DEBG up_ds_listen checked 1 jobs, back to waiting
21114 Sep 22 23:21:38.061 INFO d15c167b-c380-46a4-a953-864fc02495ed Set Active after no repair
21115 Sep 22 23:21:38.061 INFO Notify all downstairs, region set compare is done.
21116 Sep 22 23:21:38.061 INFO Current flush_numbers [0..12]: [0, 0]
21117 Sep 22 23:21:38.061 INFO Set check for repair
21118 Sep 22 23:21:38.062 INFO [1] 127.0.0.1:40280 task reports connection:true
21119 Sep 22 23:21:38.062 INFO d15c167b-c380-46a4-a953-864fc02495ed Active Active Active
21120 Sep 22 23:21:38.062 INFO Set check for repair
21121 Sep 22 23:21:38.062 INFO [2] 127.0.0.1:36377 task reports connection:true
21122 Sep 22 23:21:38.062 INFO d15c167b-c380-46a4-a953-864fc02495ed Active Active Active
21123 Sep 22 23:21:38.062 INFO Set check for repair
21124 Sep 22 23:21:38.062 INFO [0] received reconcile message
21125 Sep 22 23:21:38.062 INFO [0] All repairs completed, exit
21126 Sep 22 23:21:38.062 INFO Downstairs has completed Negotiation, task: proc
21127 Sep 22 23:21:38.062 INFO [0] Starts cmd_loop
21128 Sep 22 23:21:38.062 INFO [1] received reconcile message
21129 Sep 22 23:21:38.062 INFO [1] All repairs completed, exit
21130 Sep 22 23:21:38.062 INFO [1] Starts cmd_loop
21131 Sep 22 23:21:38.062 INFO [2] received reconcile message
21132 Sep 22 23:21:38.062 INFO [2] All repairs completed, exit
21133 Sep 22 23:21:38.062 INFO [2] Starts cmd_loop
21134 Sep 22 23:21:38.062 INFO [0] e4fe1d81-dc04-41e4-b745-9219b6982323 (d2b38da3-e438-4e27-b550-ef7f093a76bf) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21135 Sep 22 23:21:38.062 INFO [0] Transition from WaitActive to WaitQuorum
21136 The guest has finished waiting for activation
21137 Sep 22 23:21:38.062 WARN [0] new RM replaced this: None
21138 Sep 22 23:21:38.062 INFO [0] Starts reconcile loop
21139 Sep 22 23:21:38.062 INFO [1] e4fe1d81-dc04-41e4-b745-9219b6982323 (d2b38da3-e438-4e27-b550-ef7f093a76bf) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21140 Sep 22 23:21:38.062 INFO [1] Transition from WaitActive to WaitQuorum
21141 Sep 22 23:21:38.062 WARN [1] new RM replaced this: None
21142 Sep 22 23:21:38.062 INFO [1] Starts reconcile loop
21143 Sep 22 23:21:38.062 DEBG IO Read 1000 has deps []
21144 Sep 22 23:21:38.062 INFO [2] e4fe1d81-dc04-41e4-b745-9219b6982323 (d2b38da3-e438-4e27-b550-ef7f093a76bf) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
21145 Sep 22 23:21:38.062 INFO [2] Transition from WaitActive to WaitQuorum
21146 Sep 22 23:21:38.062 WARN [2] new RM replaced this: None
21147 Sep 22 23:21:38.062 INFO [2] Starts reconcile loop
21148 Sep 22 23:21:38.062 DEBG Write :1002 deps:[] res:true
21149 Sep 22 23:21:38.062 INFO [0] 127.0.0.1:48412 task reports connection:true
21150 Sep 22 23:21:38.062 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 WaitQuorum WaitQuorum WaitQuorum
21151 Sep 22 23:21:38.062 INFO [0]R flush_numbers: [0, 0]
21152 Sep 22 23:21:38.062 INFO [0]R generation: [0, 0]
21153 Sep 22 23:21:38.062 INFO [0]R dirty: [false, false]
21154 Sep 22 23:21:38.062 INFO [1]R flush_numbers: [0, 0]
21155 Sep 22 23:21:38.063 INFO [1]R generation: [0, 0]
21156 Sep 22 23:21:38.063 INFO [1]R dirty: [false, false]
21157 Sep 22 23:21:38.063 INFO [2]R flush_numbers: [0, 0]
21158 Sep 22 23:21:38.063 INFO [2]R generation: [0, 0]
21159 Sep 22 23:21:38.063 INFO [2]R dirty: [false, false]
21160 Sep 22 23:21:38.063 INFO Max found gen is 1
21161 Sep 22 23:21:38.063 INFO Generation requested: 1 >= found:1
21162 Sep 22 23:21:38.063 INFO Next flush: 1
21163 Sep 22 23:21:38.063 INFO All extents match
21164 Sep 22 23:21:38.063 INFO No downstairs repair required
21165 Sep 22 23:21:38.063 INFO No initial repair work was required
21166 Sep 22 23:21:38.063 INFO Set Downstairs and Upstairs active
21167 Sep 22 23:21:38.063 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 is now active with session: d2b38da3-e438-4e27-b550-ef7f093a76bf
21168 Sep 22 23:21:38.063 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 Set Active after no repair
21169 Sep 22 23:21:38.063 INFO Notify all downstairs, region set compare is done.
21170 Sep 22 23:21:38.063 DEBG Read :1000 deps:[] res:true
21171 Sep 22 23:21:38.063 INFO Set check for repair
21172 Sep 22 23:21:38.063 DEBG Write :1002 deps:[] res:true
21173 Sep 22 23:21:38.063 INFO [1] 127.0.0.1:41943 task reports connection:true
21174 Sep 22 23:21:38.063 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 Active Active Active
21175 Sep 22 23:21:38.063 INFO Set check for repair
21176 Sep 22 23:21:38.063 INFO [2] 127.0.0.1:33214 task reports connection:true
21177 Sep 22 23:21:38.063 DEBG Read :1000 deps:[] res:true
21178 Sep 22 23:21:38.063 INFO e4fe1d81-dc04-41e4-b745-9219b6982323 Active Active Active
21179 Sep 22 23:21:38.063 INFO Set check for repair
21180 Sep 22 23:21:38.063 INFO [0] received reconcile message
21181 Sep 22 23:21:38.063 INFO [0] All repairs completed, exit
21182 Sep 22 23:21:38.063 INFO [0] Starts cmd_loop
21183 Sep 22 23:21:38.063 DEBG Read :1000 deps:[] res:true
21184 Sep 22 23:21:38.063 DEBG Write :1002 deps:[] res:true
21185 Sep 22 23:21:38.063 INFO [1] received reconcile message
21186 Sep 22 23:21:38.063 INFO [1] All repairs completed, exit
21187 Sep 22 23:21:38.063 INFO [1] Starts cmd_loop
21188 Sep 22 23:21:38.063 INFO [2] received reconcile message
21189 Sep 22 23:21:38.063 INFO [2] All repairs completed, exit
21190 Sep 22 23:21:38.063 DEBG [0] Read AckReady 1000, : downstairs
21191 Sep 22 23:21:38.063 INFO [2] Starts cmd_loop
21192 Sep 22 23:21:38.063 DEBG [1] Read already AckReady 1000, : downstairs
21193 The guest has finished waiting for activation
21194 Sep 22 23:21:38.064 DEBG [2] Read already AckReady 1000, : downstairs
21195 Sep 22 23:21:38.064 DEBG up_ds_listen was notified
21196 Sep 22 23:21:38.064 DEBG up_ds_listen process 1000
21197 Sep 22 23:21:38.064 DEBG [A] ack job 1000:1, : downstairs
21198 Sep 22 23:21:38.064 DEBG up_ds_listen checked 1 jobs, back to waiting
21199 Sep 22 23:21:38.064 DEBG IO Write 1002 has deps []
21200 Sep 22 23:21:38.064 DEBG up_ds_listen was notified
21201 Sep 22 23:21:38.064 DEBG up_ds_listen process 1002
21202 Sep 22 23:21:38.064 DEBG [A] ack job 1002:3, : downstairs
21203 Sep 22 23:21:38.064 DEBG up_ds_listen checked 1 jobs, back to waiting
21204 Sep 22 23:21:38.064 DEBG [2] Read already AckReady 1005, : downstairs
21205 Sep 22 23:21:38.064 DEBG up_ds_listen was notified
21206 Sep 22 23:21:38.064 DEBG up_ds_listen process 1005
21207 Sep 22 23:21:38.064 DEBG [A] ack job 1005:6, : downstairs
21208 Sep 22 23:21:38.064 INFO Scrub check for df9234d2-5959-4534-87ed-4a79e641aa2a
21209 Sep 22 23:21:38.064 INFO Scrub for df9234d2-5959-4534-87ed-4a79e641aa2a begins
21210 Sep 22 23:21:38.064 INFO Scrub with total_size:7680 block_size:512
21211 Sep 22 23:21:38.064 INFO Scrubs from block 0 to 15 in (256) 131072 size IOs pm:0
21212 Sep 22 23:21:38.064 INFO Adjust block_count to 15 at offset 0
21213 Sep 22 23:21:38.065 DEBG up_ds_listen checked 1 jobs, back to waiting
21214 Sep 22 23:21:38.065 DEBG IO Write 1001 has deps [JobId(1000)]
21215 Sep 22 23:21:38.065 DEBG IO Read 1003 has deps []
21216 Sep 22 23:21:38.065 DEBG up_ds_listen was notified
21217 Sep 22 23:21:38.065 DEBG up_ds_listen process 1001
21218 Sep 22 23:21:38.065 DEBG [A] ack job 1001:2, : downstairs
21219 Sep 22 23:21:38.065 DEBG up_ds_listen checked 1 jobs, back to waiting
21220 Sep 22 23:21:38.066 DEBG Read :1003 deps:[] res:true
21221 Sep 22 23:21:38.066 DEBG Write :1001 deps:[JobId(1000)] res:true
21222 Sep 22 23:21:38.066 DEBG Read :1003 deps:[] res:true
21223 Sep 22 23:21:38.066 DEBG Write :1002 deps:[] res:true
21224 Sep 22 23:21:38.066 DEBG Write :1001 deps:[JobId(1000)] res:true
21225 Sep 22 23:21:38.066 DEBG Read :1003 deps:[] res:true
21226 Sep 22 23:21:38.066 DEBG IO Write 1000 has deps []
21227 Sep 22 23:21:38.067 DEBG Write :1001 deps:[JobId(1000)] res:true
21228 Sep 22 23:21:38.067 DEBG up_ds_listen was notified
21229 Sep 22 23:21:38.067 DEBG up_ds_listen process 1000
21230 Sep 22 23:21:38.067 DEBG [A] ack job 1000:1, : downstairs
21231 Sep 22 23:21:38.067 DEBG Write :1002 deps:[] res:true
21232 Sep 22 23:21:38.067 DEBG up_ds_listen checked 1 jobs, back to waiting
21233 Sep 22 23:21:38.067 DEBG IO Read 1002 has deps [JobId(1001)]
21234 Sep 22 23:21:38.067 DEBG Read :1002 deps:[JobId(1001)] res:true
21235 Sep 22 23:21:38.067 DEBG Write :1002 deps:[] res:true
21236 Sep 22 23:21:38.067 DEBG [0] Read AckReady 1003, : downstairs
21237 Sep 22 23:21:38.067 DEBG Read :1002 deps:[JobId(1001)] res:true
21238 Sep 22 23:21:38.068 DEBG Read :1002 deps:[JobId(1001)] res:true
21239 Sep 22 23:21:38.068 DEBG [1] Read already AckReady 1003, : downstairs
21240 Sep 22 23:21:38.068 DEBG [2] Read already AckReady 1003, : downstairs
21241 Sep 22 23:21:38.068 DEBG [0] Read AckReady 1002, : downstairs
21242 Sep 22 23:21:38.068 DEBG up_ds_listen was notified
21243 Sep 22 23:21:38.068 DEBG up_ds_listen process 1003
21244 Sep 22 23:21:38.068 DEBG [A] ack job 1003:4, : downstairs
21245 Sep 22 23:21:38.068 DEBG [1] Read already AckReady 1002, : downstairs
21246 Sep 22 23:21:38.069 DEBG [2] Read already AckReady 1002, : downstairs
21247 Sep 22 23:21:38.069 DEBG up_ds_listen was notified
21248 Sep 22 23:21:38.069 DEBG up_ds_listen process 1002
21249 Sep 22 23:21:38.069 DEBG [A] ack job 1002:3, : downstairs
21250 Sep 22 23:21:38.069 DEBG up_ds_listen checked 1 jobs, back to waiting
21251 Sep 22 23:21:38.069 DEBG up_ds_listen checked 1 jobs, back to waiting
21252 Sep 22 23:21:38.069 DEBG Write :1000 deps:[] res:true
21253 Sep 22 23:21:38.070 DEBG Write :1000 deps:[] res:true
21254 Sep 22 23:21:38.070 DEBG IO Write 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
21255 Sep 22 23:21:38.071 DEBG Write :1000 deps:[] res:true
21256 Sep 22 23:21:38.071 DEBG IO Read 1001 has deps [JobId(1000)]
21257 Sep 22 23:21:38.072 DEBG Read :1001 deps:[JobId(1000)] res:true
21258 Sep 22 23:21:38.073 DEBG Read :1001 deps:[JobId(1000)] res:true
21259 Sep 22 23:21:38.074 DEBG Read :1001 deps:[JobId(1000)] res:true
21260 test test::integration_test_url ... ok
21261 Sep 22 23:21:38.076 INFO current number of open files limit 65536 is already the maximum
21262 Sep 22 23:21:38.077 INFO Created new region file "/tmp/downstairs-xCnpISU1/region.json"
21263 Sep 22 23:21:38.077 DEBG [0] Read AckReady 1001, : downstairs
21264 Sep 22 23:21:38.077 DEBG up_ds_listen was notified
21265 test test::integration_test_volume_subvols_parent_scrub_sparse ... ok
21266 Sep 22 23:21:38.077 DEBG up_ds_listen process 1003
21267 Sep 22 23:21:38.077 DEBG [A] ack job 1003:4, : downstairs
21268 Sep 22 23:21:38.077 DEBG up_ds_listen checked 1 jobs, back to waiting
21269 Sep 22 23:21:38.077 INFO current number of open files limit 65536 is already the maximum
21270 Sep 22 23:21:38.077 INFO Created new region file "/tmp/downstairs-2ztTe6bf/region.json"
21271 Sep 22 23:21:38.079 DEBG IO Write 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
21272 Sep 22 23:21:38.079 DEBG [1] Read already AckReady 1001, : downstairs
21273 Sep 22 23:21:38.079 INFO current number of open files limit 65536 is already the maximum
21274 Sep 22 23:21:38.079 INFO Opened existing region file "/tmp/downstairs-xCnpISU1/region.json"
21275 Sep 22 23:21:38.079 INFO Database read version 1
21276 Sep 22 23:21:38.079 INFO Database write version 1
21277 Sep 22 23:21:38.080 INFO current number of open files limit 65536 is already the maximum
21278 Sep 22 23:21:38.080 INFO Opened existing region file "/tmp/downstairs-2ztTe6bf/region.json"
21279 Sep 22 23:21:38.080 INFO Database read version 1
21280 Sep 22 23:21:38.080 INFO Database write version 1
21281 Sep 22 23:21:38.081 INFO UUID: cdee1758-9349-418c-9b8d-629486998c09
21282 Sep 22 23:21:38.081 INFO Blocks per extent:5 Total Extents: 2
21283 Sep 22 23:21:38.081 INFO Crucible Version: Crucible Version: 0.0.1
21284 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21285 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21286 rustc: 1.70.0 stable x86_64-unknown-illumos
21287 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21288 Sep 22 23:21:38.081 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21289 Sep 22 23:21:38.081 INFO Using address: 127.0.0.1:48328, task: main
21290 Sep 22 23:21:38.081 INFO Repair listens on 127.0.0.1:0, task: repair
21291 Sep 22 23:21:38.081 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33609, task: repair
21292 Sep 22 23:21:38.081 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33609, task: repair
21293 Sep 22 23:21:38.081 INFO listening, local_addr: 127.0.0.1:33609, task: repair
21294 Sep 22 23:21:38.081 DEBG up_ds_listen was notified
21295 Sep 22 23:21:38.081 DEBG [2] Read already AckReady 1001, : downstairs
21296 Sep 22 23:21:38.081 DEBG up_ds_listen process 1003
21297 Sep 22 23:21:38.081 DEBG [A] ack job 1003:4, : downstairs
21298 Sep 22 23:21:38.081 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33609, task: repair
21299 Sep 22 23:21:38.081 INFO Using repair address: 127.0.0.1:33609, task: main
21300 Sep 22 23:21:38.081 DEBG up_ds_listen was notified
21301 Sep 22 23:21:38.081 INFO No SSL acceptor configured, task: main
21302 Sep 22 23:21:38.081 DEBG up_ds_listen checked 1 jobs, back to waiting
21303 Sep 22 23:21:38.081 DEBG up_ds_listen process 1001
21304 Sep 22 23:21:38.081 DEBG [A] ack job 1001:2, : downstairs
21305 Sep 22 23:21:38.081 INFO Scrub at offset 15/15 sp:15
21306 Sep 22 23:21:38.082 INFO current number of open files limit 65536 is already the maximum
21307 Sep 22 23:21:38.082 INFO Created new region file "/tmp/downstairs-oZLK1PnT/region.json"
21308 Sep 22 23:21:38.082 INFO UUID: 01c46ab7-4671-4b4c-8a3d-a1a31f152199
21309 Sep 22 23:21:38.082 INFO Blocks per extent:5 Total Extents: 2
21310 Sep 22 23:21:38.082 INFO Crucible Version: Crucible Version: 0.0.1
21311 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21312 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21313 rustc: 1.70.0 stable x86_64-unknown-illumos
21314 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21315 Sep 22 23:21:38.082 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21316 Sep 22 23:21:38.082 INFO Using address: 127.0.0.1:58203, task: main
21317 Sep 22 23:21:38.082 DEBG up_ds_listen checked 1 jobs, back to waiting
21318 Sep 22 23:21:38.082 INFO Repair listens on 127.0.0.1:0, task: repair
21319 Sep 22 23:21:38.082 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38864, task: repair
21320 Sep 22 23:21:38.082 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38864, task: repair
21321 Sep 22 23:21:38.082 INFO listening, local_addr: 127.0.0.1:38864, task: repair
21322 Sep 22 23:21:38.082 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38864, task: repair
21323 Sep 22 23:21:38.082 INFO Using repair address: 127.0.0.1:38864, task: main
21324 Sep 22 23:21:38.082 INFO No SSL acceptor configured, task: main
21325 Sep 22 23:21:38.083 INFO current number of open files limit 65536 is already the maximum
21326 Sep 22 23:21:38.083 INFO Scrub df9234d2-5959-4534-87ed-4a79e641aa2a done in 0 seconds. Retries:0 scrub_size:7680 size:15 pause_milli:0
21327 Sep 22 23:21:38.083 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001), JobId(1000)]
21328 Sep 22 23:21:38.083 INFO Created new region file "/tmp/downstairs-pi6CxBYH/region.json"
21329 Sep 22 23:21:38.084 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
21330 Sep 22 23:21:38.085 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21331 Sep 22 23:21:38.085 INFO current number of open files limit 65536 is already the maximum
21332 Sep 22 23:21:38.085 INFO Opened existing region file "/tmp/downstairs-oZLK1PnT/region.json"
21333 Sep 22 23:21:38.085 INFO Database read version 1
21334 Sep 22 23:21:38.085 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21335 Sep 22 23:21:38.085 INFO Database write version 1
21336 Sep 22 23:21:38.086 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21337 Sep 22 23:21:38.086 DEBG up_ds_listen was notified
21338 Sep 22 23:21:38.086 DEBG up_ds_listen process 1004
21339 Sep 22 23:21:38.086 DEBG [A] ack job 1004:5, : downstairs
21340 Sep 22 23:21:38.086 DEBG [rc] retire 1004 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)], : downstairs
21341 Sep 22 23:21:38.086 DEBG up_ds_listen checked 1 jobs, back to waiting
21342 Sep 22 23:21:38.086 DEBG up_ds_listen was notified
21343 Sep 22 23:21:38.086 DEBG up_ds_listen process 1002
21344 Sep 22 23:21:38.086 DEBG [A] ack job 1002:3, : downstairs
21345 Sep 22 23:21:38.086 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001), JobId(1000)]
21346 Sep 22 23:21:38.086 DEBG up_ds_listen checked 1 jobs, back to waiting
21347 Sep 22 23:21:38.086 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
21348 Sep 22 23:21:38.087 INFO current number of open files limit 65536 is already the maximum
21349 Sep 22 23:21:38.087 INFO Opened existing region file "/tmp/downstairs-pi6CxBYH/region.json"
21350 Sep 22 23:21:38.087 INFO Database read version 1
21351 Sep 22 23:21:38.087 INFO Database write version 1
21352 Sep 22 23:21:38.087 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21353 Sep 22 23:21:38.087 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21354 Sep 22 23:21:38.088 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21355 Sep 22 23:21:38.088 INFO UUID: 5df431a9-e8cc-4535-a933-0ec6a3392f80
21356 Sep 22 23:21:38.088 INFO Blocks per extent:5 Total Extents: 2
21357 Sep 22 23:21:38.088 INFO Crucible Version: Crucible Version: 0.0.1
21358 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21359 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21360 rustc: 1.70.0 stable x86_64-unknown-illumos
21361 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21362 Sep 22 23:21:38.088 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21363 Sep 22 23:21:38.088 INFO Using address: 127.0.0.1:62933, task: main
21364 Sep 22 23:21:38.088 INFO Repair listens on 127.0.0.1:0, task: repair
21365 Sep 22 23:21:38.088 INFO UUID: 08ca7a26-86b3-4310-8bb9-1c7dab6c4622
21366 Sep 22 23:21:38.088 INFO Blocks per extent:5 Total Extents: 2
21367 Sep 22 23:21:38.088 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43692, task: repair
21368 Sep 22 23:21:38.088 INFO Crucible Version: Crucible Version: 0.0.1
21369 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21370 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21371 rustc: 1.70.0 stable x86_64-unknown-illumos
21372 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21373 Sep 22 23:21:38.088 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21374 Sep 22 23:21:38.088 INFO Using address: 127.0.0.1:44218, task: main
21375 Sep 22 23:21:38.088 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43692, task: repair
21376 Sep 22 23:21:38.089 INFO listening, local_addr: 127.0.0.1:43692, task: repair
21377 Sep 22 23:21:38.089 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21378 Sep 22 23:21:38.089 INFO Repair listens on 127.0.0.1:0, task: repair
21379 Sep 22 23:21:38.089 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43692, task: repair
21380 Sep 22 23:21:38.089 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21381 Sep 22 23:21:38.089 INFO Using repair address: 127.0.0.1:43692, task: main
21382 Sep 22 23:21:38.089 INFO No SSL acceptor configured, task: main
21383 Sep 22 23:21:38.089 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35997, task: repair
21384 Sep 22 23:21:38.089 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35997, task: repair
21385 Sep 22 23:21:38.089 INFO listening, local_addr: 127.0.0.1:35997, task: repair
21386 Sep 22 23:21:38.089 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21387 Sep 22 23:21:38.089 INFO current number of open files limit 65536 is already the maximum
21388 Sep 22 23:21:38.089 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35997, task: repair
21389 Sep 22 23:21:38.089 INFO Using repair address: 127.0.0.1:35997, task: main
21390 Sep 22 23:21:38.089 INFO No SSL acceptor configured, task: main
21391 Sep 22 23:21:38.089 INFO Created new region file "/tmp/downstairs-3f3WKQgh/region.json"
21392 Sep 22 23:21:38.089 DEBG up_ds_listen was notified
21393 Sep 22 23:21:38.089 DEBG up_ds_listen process 1004
21394 Sep 22 23:21:38.089 DEBG [A] ack job 1004:5, : downstairs
21395 Sep 22 23:21:38.089 DEBG [rc] retire 1004 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)], : downstairs
21396 Sep 22 23:21:38.089 INFO current number of open files limit 65536 is already the maximum
21397 Sep 22 23:21:38.089 DEBG up_ds_listen checked 1 jobs, back to waiting
21398 Sep 22 23:21:38.089 INFO Created new region file "/tmp/downstairs-8RaWELXM/region.json"
21399 Sep 22 23:21:38.090 DEBG IO Read 1005 has deps []
21400 Sep 22 23:21:38.090 DEBG [0] Read AckReady 1003, : downstairs
21401 Sep 22 23:21:38.091 DEBG Read :1005 deps:[] res:true
21402 Sep 22 23:21:38.091 DEBG Read :1005 deps:[] res:true
21403 Sep 22 23:21:38.092 DEBG Read :1005 deps:[] res:true
21404 Sep 22 23:21:38.092 DEBG [1] Read already AckReady 1003, : downstairs
21405 Sep 22 23:21:38.093 INFO current number of open files limit 65536 is already the maximum
21406 Sep 22 23:21:38.093 INFO Opened existing region file "/tmp/downstairs-3f3WKQgh/region.json"
21407 Sep 22 23:21:38.093 INFO Database read version 1
21408 Sep 22 23:21:38.093 INFO Database write version 1
21409 Sep 22 23:21:38.093 INFO current number of open files limit 65536 is already the maximum
21410 Sep 22 23:21:38.093 INFO Opened existing region file "/tmp/downstairs-8RaWELXM/region.json"
21411 Sep 22 23:21:38.093 INFO Database read version 1
21412 Sep 22 23:21:38.093 INFO Database write version 1
21413 Sep 22 23:21:38.094 DEBG [2] Read already AckReady 1003, : downstairs
21414 Sep 22 23:21:38.094 DEBG up_ds_listen was notified
21415 Sep 22 23:21:38.094 DEBG up_ds_listen process 1003
21416 Sep 22 23:21:38.094 DEBG [A] ack job 1003:4, : downstairs
21417 Sep 22 23:21:38.094 DEBG up_ds_listen checked 1 jobs, back to waiting
21418 Sep 22 23:21:38.095 INFO UUID: ac9e9e82-be8d-4c20-a78c-e47f39a6b49e
21419 Sep 22 23:21:38.096 INFO Blocks per extent:5 Total Extents: 2
21420 Sep 22 23:21:38.096 INFO Crucible Version: Crucible Version: 0.0.1
21421 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21422 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21423 rustc: 1.70.0 stable x86_64-unknown-illumos
21424 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21425 Sep 22 23:21:38.096 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21426 Sep 22 23:21:38.096 INFO Using address: 127.0.0.1:65141, task: main
21427 Sep 22 23:21:38.096 DEBG [0] Read AckReady 1005, : downstairs
21428 Sep 22 23:21:38.096 INFO Repair listens on 127.0.0.1:0, task: repair
21429 Sep 22 23:21:38.096 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50256, task: repair
21430 Sep 22 23:21:38.096 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50256, task: repair
21431 Sep 22 23:21:38.096 INFO UUID: 6f2fbc1f-a9a2-41d4-9856-3b247141fc3f
21432 Sep 22 23:21:38.096 INFO listening, local_addr: 127.0.0.1:50256, task: repair
21433 Sep 22 23:21:38.096 INFO Blocks per extent:5 Total Extents: 2
21434 Sep 22 23:21:38.096 INFO Crucible Version: Crucible Version: 0.0.1
21435 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21436 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21437 rustc: 1.70.0 stable x86_64-unknown-illumos
21438 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21439 Sep 22 23:21:38.096 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21440 Sep 22 23:21:38.096 INFO Using address: 127.0.0.1:52819, task: main
21441 Sep 22 23:21:38.096 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50256, task: repair
21442 Sep 22 23:21:38.096 INFO Using repair address: 127.0.0.1:50256, task: main
21443 Sep 22 23:21:38.096 INFO No SSL acceptor configured, task: main
21444 Sep 22 23:21:38.096 INFO Repair listens on 127.0.0.1:0, task: repair
21445 Sep 22 23:21:38.097 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56254, task: repair
21446 Sep 22 23:21:38.097 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56254, task: repair
21447 Sep 22 23:21:38.097 INFO listening, local_addr: 127.0.0.1:56254, task: repair
21448 Sep 22 23:21:38.097 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56254, task: repair
21449 Sep 22 23:21:38.097 INFO Using repair address: 127.0.0.1:56254, task: main
21450 Sep 22 23:21:38.097 INFO No SSL acceptor configured, task: main
21451 Sep 22 23:21:38.097 INFO Upstairs starts
21452 Sep 22 23:21:38.097 INFO Crucible Version: BuildInfo {
21453 version: "0.0.1",
21454 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
21455 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
21456 git_branch: "main",
21457 rustc_semver: "1.70.0",
21458 rustc_channel: "stable",
21459 rustc_host_triple: "x86_64-unknown-illumos",
21460 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
21461 cargo_triple: "x86_64-unknown-illumos",
21462 debug: true,
21463 opt_level: 0,
21464 }
21465 Sep 22 23:21:38.097 INFO Upstairs <-> Downstairs Message Version: 4
21466 Sep 22 23:21:38.097 INFO Crucible stats registered with UUID: 5b90282f-6bb7-4b5b-a1a9-64d086158b57
21467 Sep 22 23:21:38.097 INFO Crucible 5b90282f-6bb7-4b5b-a1a9-64d086158b57 has session id: 9d7d57fb-4df7-4fab-a327-c17718952c14
21468 Sep 22 23:21:38.097 INFO listening on 127.0.0.1:0, task: main
21469 Sep 22 23:21:38.097 INFO listening on 127.0.0.1:0, task: main
21470 Sep 22 23:21:38.097 INFO listening on 127.0.0.1:0, task: main
21471 Sep 22 23:21:38.097 INFO [0] connecting to 127.0.0.1:48328, looper: 0
21472 Sep 22 23:21:38.097 INFO [1] connecting to 127.0.0.1:62933, looper: 1
21473 Sep 22 23:21:38.097 INFO [2] connecting to 127.0.0.1:65141, looper: 2
21474 Sep 22 23:21:38.098 INFO up_listen starts, task: up_listen
21475 Sep 22 23:21:38.098 INFO Wait for all three downstairs to come online
21476 Sep 22 23:21:38.098 INFO Flush timeout: 0.5
21477 Sep 22 23:21:38.098 INFO Upstairs starts
21478 Sep 22 23:21:38.098 INFO Crucible Version: BuildInfo {
21479 version: "0.0.1",
21480 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
21481 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
21482 git_branch: "main",
21483 rustc_semver: "1.70.0",
21484 rustc_channel: "stable",
21485 rustc_host_triple: "x86_64-unknown-illumos",
21486 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
21487 cargo_triple: "x86_64-unknown-illumos",
21488 debug: true,
21489 opt_level: 0,
21490 }
21491 Sep 22 23:21:38.098 INFO Upstairs <-> Downstairs Message Version: 4
21492 Sep 22 23:21:38.098 INFO Crucible stats registered with UUID: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea
21493 Sep 22 23:21:38.098 INFO Crucible 826051a1-df7d-43b5-adbe-a9bb32b9d0ea has session id: 005ed4bb-0eb0-47f1-b9b2-0af5c1a94d59
21494 test test::integration_test_volume_write_unwritten_1 ... okSep 22 23:21:38.098 INFO [0] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 looper connected, looper: 0
21495 
21496 Sep 22 23:21:38.098 INFO [0] Proc runs for 127.0.0.1:48328 in state New
21497 Sep 22 23:21:38.098 INFO listening on 127.0.0.1:0, task: main
21498 Sep 22 23:21:38.098 INFO accepted connection from 127.0.0.1:53646, task: main
21499 Sep 22 23:21:38.098 INFO listening on 127.0.0.1:0, task: main
21500 Sep 22 23:21:38.098 INFO accepted connection from 127.0.0.1:61364, task: main
21501 Sep 22 23:21:38.098 INFO current number of open files limit 65536 is already the maximum
21502 Sep 22 23:21:38.098 INFO [1] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 looper connected, looper: 1
21503 Sep 22 23:21:38.098 INFO [1] Proc runs for 127.0.0.1:62933 in state New
21504 Sep 22 23:21:38.098 INFO listening on 127.0.0.1:0, task: main
21505 Sep 22 23:21:38.098 INFO [2] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 looper connected, looper: 2
21506 Sep 22 23:21:38.098 INFO Created new region file "/tmp/downstairs-GpQSlTJR/region.json"
21507 Sep 22 23:21:38.098 INFO [2] Proc runs for 127.0.0.1:65141 in state New
21508 Sep 22 23:21:38.098 INFO [0] connecting to 127.0.0.1:58203, looper: 0
21509 Sep 22 23:21:38.098 INFO accepted connection from 127.0.0.1:60140, task: main
21510 Sep 22 23:21:38.098 INFO [1] connecting to 127.0.0.1:44218, looper: 1
21511 Sep 22 23:21:38.098 DEBG [1] Read already AckReady 1005, : downstairs
21512 Sep 22 23:21:38.098 INFO [2] connecting to 127.0.0.1:52819, looper: 2
21513 Sep 22 23:21:38.098 INFO Connection request from 5b90282f-6bb7-4b5b-a1a9-64d086158b57 with version 4, task: proc
21514 Sep 22 23:21:38.098 INFO upstairs UpstairsConnection { upstairs_id: 5b90282f-6bb7-4b5b-a1a9-64d086158b57, session_id: 2d00b74f-dde3-4064-ba30-0de199ff9cba, gen: 1 } connected, version 4, task: proc
21515 Sep 22 23:21:38.099 INFO up_listen starts, task: up_listen
21516 Sep 22 23:21:38.099 INFO Wait for all three downstairs to come online
21517 Sep 22 23:21:38.099 INFO Flush timeout: 0.5
21518 Sep 22 23:21:38.099 INFO Connection request from 5b90282f-6bb7-4b5b-a1a9-64d086158b57 with version 4, task: proc
21519 Sep 22 23:21:38.099 INFO upstairs UpstairsConnection { upstairs_id: 5b90282f-6bb7-4b5b-a1a9-64d086158b57, session_id: 2d00b74f-dde3-4064-ba30-0de199ff9cba, gen: 1 } connected, version 4, task: proc
21520 Sep 22 23:21:38.099 INFO Connection request from 5b90282f-6bb7-4b5b-a1a9-64d086158b57 with version 4, task: proc
21521 Sep 22 23:21:38.099 INFO upstairs UpstairsConnection { upstairs_id: 5b90282f-6bb7-4b5b-a1a9-64d086158b57, session_id: 2d00b74f-dde3-4064-ba30-0de199ff9cba, gen: 1 } connected, version 4, task: proc
21522 Sep 22 23:21:38.099 INFO accepted connection from 127.0.0.1:46749, task: main
21523 Sep 22 23:21:38.099 INFO accepted connection from 127.0.0.1:65098, task: main
21524 Sep 22 23:21:38.099 INFO [0] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 (2d00b74f-dde3-4064-ba30-0de199ff9cba) New New New ds_transition to WaitActive
21525 Sep 22 23:21:38.099 INFO [0] Transition from New to WaitActive
21526 Sep 22 23:21:38.099 INFO [1] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 (2d00b74f-dde3-4064-ba30-0de199ff9cba) WaitActive New New ds_transition to WaitActive
21527 Sep 22 23:21:38.099 INFO [1] Transition from New to WaitActive
21528 Sep 22 23:21:38.099 INFO [0] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea looper connected, looper: 0
21529 Sep 22 23:21:38.099 INFO [2] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 (2d00b74f-dde3-4064-ba30-0de199ff9cba) WaitActive WaitActive New ds_transition to WaitActive
21530 Sep 22 23:21:38.099 INFO [2] Transition from New to WaitActive
21531 Sep 22 23:21:38.099 INFO [0] Proc runs for 127.0.0.1:58203 in state New
21532 Sep 22 23:21:38.099 INFO [1] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea looper connected, looper: 1
21533 The guest has requested activation
21534 Sep 22 23:21:38.099 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 active request set
21535 Sep 22 23:21:38.099 INFO [1] Proc runs for 127.0.0.1:44218 in state New
21536 Sep 22 23:21:38.099 INFO [0] received activate with gen 1
21537 Sep 22 23:21:38.099 INFO [2] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea looper connected, looper: 2
21538 Sep 22 23:21:38.099 INFO [0] client got ds_active_rx, promote! session 2d00b74f-dde3-4064-ba30-0de199ff9cba
21539 Sep 22 23:21:38.099 INFO [2] Proc runs for 127.0.0.1:52819 in state New
21540 Sep 22 23:21:38.099 INFO [1] received activate with gen 1
21541 Sep 22 23:21:38.099 INFO [1] client got ds_active_rx, promote! session 2d00b74f-dde3-4064-ba30-0de199ff9cba
21542 Sep 22 23:21:38.099 INFO [2] received activate with gen 1
21543 Sep 22 23:21:38.099 INFO [2] client got ds_active_rx, promote! session 2d00b74f-dde3-4064-ba30-0de199ff9cba
21544 Sep 22 23:21:38.099 INFO accepted connection from 127.0.0.1:46152, task: main
21545 Sep 22 23:21:38.099 INFO UpstairsConnection { upstairs_id: 5b90282f-6bb7-4b5b-a1a9-64d086158b57, session_id: 2d00b74f-dde3-4064-ba30-0de199ff9cba, gen: 1 } is now active (read-write)
21546 Sep 22 23:21:38.100 INFO UpstairsConnection { upstairs_id: 5b90282f-6bb7-4b5b-a1a9-64d086158b57, session_id: 2d00b74f-dde3-4064-ba30-0de199ff9cba, gen: 1 } is now active (read-write)
21547 Sep 22 23:21:38.100 INFO UpstairsConnection { upstairs_id: 5b90282f-6bb7-4b5b-a1a9-64d086158b57, session_id: 2d00b74f-dde3-4064-ba30-0de199ff9cba, gen: 1 } is now active (read-write)
21548 Sep 22 23:21:38.100 INFO Connection request from 826051a1-df7d-43b5-adbe-a9bb32b9d0ea with version 4, task: proc
21549 Sep 22 23:21:38.100 INFO upstairs UpstairsConnection { upstairs_id: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea, session_id: 77674847-2bf1-45c2-9d96-dadd8139fd25, gen: 1 } connected, version 4, task: proc
21550 Sep 22 23:21:38.100 INFO Connection request from 826051a1-df7d-43b5-adbe-a9bb32b9d0ea with version 4, task: proc
21551 Sep 22 23:21:38.100 INFO upstairs UpstairsConnection { upstairs_id: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea, session_id: 77674847-2bf1-45c2-9d96-dadd8139fd25, gen: 1 } connected, version 4, task: proc
21552 Sep 22 23:21:38.100 INFO Connection request from 826051a1-df7d-43b5-adbe-a9bb32b9d0ea with version 4, task: proc
21553 Sep 22 23:21:38.100 INFO upstairs UpstairsConnection { upstairs_id: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea, session_id: 77674847-2bf1-45c2-9d96-dadd8139fd25, gen: 1 } connected, version 4, task: proc
21554 Sep 22 23:21:38.100 INFO [0] downstairs client at 127.0.0.1:48328 has UUID cdee1758-9349-418c-9b8d-629486998c09
21555 Sep 22 23:21:38.100 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: cdee1758-9349-418c-9b8d-629486998c09, encrypted: true, database_read_version: 1, database_write_version: 1 }
21556 Sep 22 23:21:38.100 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 WaitActive WaitActive WaitActive
21557 Sep 22 23:21:38.100 INFO [1] downstairs client at 127.0.0.1:62933 has UUID 5df431a9-e8cc-4535-a933-0ec6a3392f80
21558 Sep 22 23:21:38.100 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5df431a9-e8cc-4535-a933-0ec6a3392f80, encrypted: true, database_read_version: 1, database_write_version: 1 }
21559 Sep 22 23:21:38.100 DEBG [2] Read already AckReady 1005, : downstairs
21560 Sep 22 23:21:38.100 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 WaitActive WaitActive WaitActive
21561 Sep 22 23:21:38.100 DEBG up_ds_listen was notified
21562 Sep 22 23:21:38.100 DEBG up_ds_listen process 1005
21563 Sep 22 23:21:38.100 INFO [0] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea (77674847-2bf1-45c2-9d96-dadd8139fd25) New New New ds_transition to WaitActive
21564 Sep 22 23:21:38.100 INFO [2] downstairs client at 127.0.0.1:65141 has UUID ac9e9e82-be8d-4c20-a78c-e47f39a6b49e
21565 Sep 22 23:21:38.100 DEBG [A] ack job 1005:6, : downstairs
21566 Sep 22 23:21:38.100 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ac9e9e82-be8d-4c20-a78c-e47f39a6b49e, encrypted: true, database_read_version: 1, database_write_version: 1 }
21567 Sep 22 23:21:38.100 INFO [0] Transition from New to WaitActive
21568 Sep 22 23:21:38.100 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 WaitActive WaitActive WaitActive
21569 Sep 22 23:21:38.100 INFO [1] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea (77674847-2bf1-45c2-9d96-dadd8139fd25) WaitActive New New ds_transition to WaitActive
21570 Sep 22 23:21:38.100 INFO [1] Transition from New to WaitActive
21571 Sep 22 23:21:38.100 INFO Current flush_numbers [0..12]: [0, 0]
21572 Sep 22 23:21:38.100 INFO [2] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea (77674847-2bf1-45c2-9d96-dadd8139fd25) WaitActive WaitActive New ds_transition to WaitActive
21573 Sep 22 23:21:38.100 INFO [2] Transition from New to WaitActive
21574 Sep 22 23:21:38.101 DEBG up_ds_listen checked 1 jobs, back to waiting
21575 Sep 22 23:21:38.101 INFO Downstairs has completed Negotiation, task: proc
21576 The guest has requested activation
21577 Sep 22 23:21:38.101 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea active request set
21578 Sep 22 23:21:38.101 INFO [0] received activate with gen 1
21579 Sep 22 23:21:38.101 DEBG IO Read 1005 has deps []
21580 Sep 22 23:21:38.101 INFO [0] client got ds_active_rx, promote! session 77674847-2bf1-45c2-9d96-dadd8139fd25
21581 Sep 22 23:21:38.101 INFO Current flush_numbers [0..12]: [0, 0]
21582 Sep 22 23:21:38.101 INFO [1] received activate with gen 1
21583 Sep 22 23:21:38.101 INFO [1] client got ds_active_rx, promote! session 77674847-2bf1-45c2-9d96-dadd8139fd25
21584 Sep 22 23:21:38.101 INFO [2] received activate with gen 1
21585 Sep 22 23:21:38.101 INFO [2] client got ds_active_rx, promote! session 77674847-2bf1-45c2-9d96-dadd8139fd25
21586 Sep 22 23:21:38.101 INFO Downstairs has completed Negotiation, task: proc
21587 Sep 22 23:21:38.101 INFO UpstairsConnection { upstairs_id: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea, session_id: 77674847-2bf1-45c2-9d96-dadd8139fd25, gen: 1 } is now active (read-write)
21588 Sep 22 23:21:38.101 INFO current number of open files limit 65536 is already the maximum
21589 Sep 22 23:21:38.101 INFO Opened existing region file "/tmp/downstairs-GpQSlTJR/region.json"
21590 Sep 22 23:21:38.101 INFO Database read version 1
21591 Sep 22 23:21:38.101 INFO Database write version 1
21592 Sep 22 23:21:38.101 INFO UpstairsConnection { upstairs_id: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea, session_id: 77674847-2bf1-45c2-9d96-dadd8139fd25, gen: 1 } is now active (read-write)
21593 Sep 22 23:21:38.101 INFO Current flush_numbers [0..12]: [0, 0]
21594 Sep 22 23:21:38.101 INFO UpstairsConnection { upstairs_id: 826051a1-df7d-43b5-adbe-a9bb32b9d0ea, session_id: 77674847-2bf1-45c2-9d96-dadd8139fd25, gen: 1 } is now active (read-write)
21595 Sep 22 23:21:38.101 INFO Downstairs has completed Negotiation, task: proc
21596 Sep 22 23:21:38.102 INFO [0] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 (2d00b74f-dde3-4064-ba30-0de199ff9cba) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21597 Sep 22 23:21:38.102 INFO [0] Transition from WaitActive to WaitQuorum
21598 Sep 22 23:21:38.102 WARN [0] new RM replaced this: None
21599 Sep 22 23:21:38.102 INFO [0] Starts reconcile loop
21600 Sep 22 23:21:38.102 INFO [1] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 (2d00b74f-dde3-4064-ba30-0de199ff9cba) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21601 Sep 22 23:21:38.102 INFO [1] Transition from WaitActive to WaitQuorum
21602 Sep 22 23:21:38.102 WARN [1] new RM replaced this: None
21603 Sep 22 23:21:38.102 INFO [0] downstairs client at 127.0.0.1:58203 has UUID 01c46ab7-4671-4b4c-8a3d-a1a31f152199
21604 Sep 22 23:21:38.102 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 01c46ab7-4671-4b4c-8a3d-a1a31f152199, encrypted: true, database_read_version: 1, database_write_version: 1 }
21605 Sep 22 23:21:38.102 INFO [1] Starts reconcile loop
21606 Sep 22 23:21:38.102 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea WaitActive WaitActive WaitActive
21607 Sep 22 23:21:38.102 INFO [2] 5b90282f-6bb7-4b5b-a1a9-64d086158b57 (2d00b74f-dde3-4064-ba30-0de199ff9cba) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
21608 Sep 22 23:21:38.102 DEBG Read :1005 deps:[] res:true
21609 Sep 22 23:21:38.102 INFO [1] downstairs client at 127.0.0.1:44218 has UUID 08ca7a26-86b3-4310-8bb9-1c7dab6c4622
21610 Sep 22 23:21:38.102 INFO [2] Transition from WaitActive to WaitQuorum
21611 Sep 22 23:21:38.102 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 08ca7a26-86b3-4310-8bb9-1c7dab6c4622, encrypted: true, database_read_version: 1, database_write_version: 1 }
21612 Sep 22 23:21:38.102 WARN [2] new RM replaced this: None
21613 Sep 22 23:21:38.102 INFO [2] Starts reconcile loop
21614 Sep 22 23:21:38.102 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea WaitActive WaitActive WaitActive
21615 Sep 22 23:21:38.102 INFO [2] downstairs client at 127.0.0.1:52819 has UUID 6f2fbc1f-a9a2-41d4-9856-3b247141fc3f
21616 Sep 22 23:21:38.102 INFO [0] 127.0.0.1:48328 task reports connection:true
21617 Sep 22 23:21:38.102 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6f2fbc1f-a9a2-41d4-9856-3b247141fc3f, encrypted: true, database_read_version: 1, database_write_version: 1 }
21618 Sep 22 23:21:38.102 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 WaitQuorum WaitQuorum WaitQuorum
21619 Sep 22 23:21:38.102 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea WaitActive WaitActive WaitActive
21620 Sep 22 23:21:38.102 INFO [0]R flush_numbers: [0, 0]
21621 Sep 22 23:21:38.102 INFO [0]R generation: [0, 0]
21622 Sep 22 23:21:38.102 INFO [0]R dirty: [false, false]
21623 Sep 22 23:21:38.102 INFO [1]R flush_numbers: [0, 0]
21624 Sep 22 23:21:38.102 INFO [1]R generation: [0, 0]
21625 Sep 22 23:21:38.102 INFO [1]R dirty: [false, false]
21626 Sep 22 23:21:38.102 INFO [2]R flush_numbers: [0, 0]
21627 Sep 22 23:21:38.102 INFO [2]R generation: [0, 0]
21628 Sep 22 23:21:38.102 INFO [2]R dirty: [false, false]
21629 Sep 22 23:21:38.102 DEBG Read :1005 deps:[] res:true
21630 Sep 22 23:21:38.102 INFO Current flush_numbers [0..12]: [0, 0]
21631 Sep 22 23:21:38.102 INFO Max found gen is 1
21632 Sep 22 23:21:38.102 INFO Generation requested: 1 >= found:1
21633 Sep 22 23:21:38.102 INFO Next flush: 1
21634 Sep 22 23:21:38.102 INFO All extents match
21635 Sep 22 23:21:38.102 INFO No downstairs repair required
21636 Sep 22 23:21:38.102 INFO No initial repair work was required
21637 Sep 22 23:21:38.102 INFO Set Downstairs and Upstairs active
21638 Sep 22 23:21:38.102 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 is now active with session: 2d00b74f-dde3-4064-ba30-0de199ff9cba
21639 Sep 22 23:21:38.102 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 Set Active after no repair
21640 Sep 22 23:21:38.102 INFO Notify all downstairs, region set compare is done.
21641 Sep 22 23:21:38.102 INFO Downstairs has completed Negotiation, task: proc
21642 Sep 22 23:21:38.102 INFO Set check for repair
21643 Sep 22 23:21:38.102 INFO [1] 127.0.0.1:62933 task reports connection:true
21644 Sep 22 23:21:38.103 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 Active Active Active
21645 Sep 22 23:21:38.103 INFO Set check for repair
21646 Sep 22 23:21:38.103 DEBG Read :1005 deps:[] res:true
21647 Sep 22 23:21:38.103 INFO [2] 127.0.0.1:65141 task reports connection:true
21648 Sep 22 23:21:38.103 INFO Current flush_numbers [0..12]: [0, 0]
21649 Sep 22 23:21:38.103 INFO 5b90282f-6bb7-4b5b-a1a9-64d086158b57 Active Active Active
21650 Sep 22 23:21:38.103 INFO Set check for repair
21651 Sep 22 23:21:38.103 INFO [0] received reconcile message
21652 Sep 22 23:21:38.103 INFO Downstairs has completed Negotiation, task: proc
21653 Sep 22 23:21:38.103 INFO [0] All repairs completed, exit
21654 Sep 22 23:21:38.103 INFO [0] Starts cmd_loop
21655 Sep 22 23:21:38.103 INFO [1] received reconcile message
21656 Sep 22 23:21:38.103 INFO [1] All repairs completed, exit
21657 Sep 22 23:21:38.103 INFO [1] Starts cmd_loop
21658 Sep 22 23:21:38.103 INFO Current flush_numbers [0..12]: [0, 0]
21659 Sep 22 23:21:38.103 INFO [2] received reconcile message
21660 Sep 22 23:21:38.103 INFO [2] All repairs completed, exit
21661 Sep 22 23:21:38.103 INFO [2] Starts cmd_loop
21662 The guest has finished waiting for activation
21663 Sep 22 23:21:38.103 INFO Downstairs has completed Negotiation, task: proc
21664 Sep 22 23:21:38.103 INFO UUID: 1f4a8f3f-442f-484d-8ab0-408b22284518
21665 Sep 22 23:21:38.103 INFO Blocks per extent:5 Total Extents: 2
21666 Sep 22 23:21:38.103 INFO Crucible Version: Crucible Version: 0.0.1
21667 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21668 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21669 rustc: 1.70.0 stable x86_64-unknown-illumos
21670 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21671 Sep 22 23:21:38.103 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21672 Sep 22 23:21:38.103 INFO Using address: 127.0.0.1:49227, task: main
21673 Sep 22 23:21:38.103 INFO [0] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea (77674847-2bf1-45c2-9d96-dadd8139fd25) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21674 Sep 22 23:21:38.103 INFO [0] Transition from WaitActive to WaitQuorum
21675 Sep 22 23:21:38.103 WARN [0] new RM replaced this: None
21676 Sep 22 23:21:38.103 INFO [0] Starts reconcile loop
21677 Sep 22 23:21:38.104 INFO [1] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea (77674847-2bf1-45c2-9d96-dadd8139fd25) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21678 Sep 22 23:21:38.104 INFO [1] Transition from WaitActive to WaitQuorum
21679 Sep 22 23:21:38.104 INFO Repair listens on 127.0.0.1:0, task: repair
21680 Sep 22 23:21:38.104 WARN [1] new RM replaced this: None
21681 Sep 22 23:21:38.104 INFO [1] Starts reconcile loop
21682 Sep 22 23:21:38.104 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61825, task: repair
21683 Sep 22 23:21:38.104 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61825, task: repair
21684 Sep 22 23:21:38.104 INFO [2] 826051a1-df7d-43b5-adbe-a9bb32b9d0ea (77674847-2bf1-45c2-9d96-dadd8139fd25) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
21685 Sep 22 23:21:38.104 INFO listening, local_addr: 127.0.0.1:61825, task: repair
21686 Sep 22 23:21:38.104 INFO [2] Transition from WaitActive to WaitQuorum
21687 Sep 22 23:21:38.104 WARN [2] new RM replaced this: None
21688 Sep 22 23:21:38.104 INFO [2] Starts reconcile loop
21689 Sep 22 23:21:38.104 INFO [0] 127.0.0.1:58203 task reports connection:true
21690 Sep 22 23:21:38.104 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61825, task: repair
21691 Sep 22 23:21:38.104 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea WaitQuorum WaitQuorum WaitQuorum
21692 Sep 22 23:21:38.104 INFO Using repair address: 127.0.0.1:61825, task: main
21693 Sep 22 23:21:38.104 INFO [0]R flush_numbers: [0, 0]
21694 Sep 22 23:21:38.104 INFO No SSL acceptor configured, task: main
21695 Sep 22 23:21:38.104 INFO [0]R generation: [0, 0]
21696 Sep 22 23:21:38.104 INFO [0]R dirty: [false, false]
21697 Sep 22 23:21:38.104 INFO [1]R flush_numbers: [0, 0]
21698 Sep 22 23:21:38.104 INFO [1]R generation: [0, 0]
21699 Sep 22 23:21:38.104 INFO [1]R dirty: [false, false]
21700 Sep 22 23:21:38.104 INFO [2]R flush_numbers: [0, 0]
21701 Sep 22 23:21:38.104 INFO [2]R generation: [0, 0]
21702 Sep 22 23:21:38.104 INFO current number of open files limit 65536 is already the maximum
21703 Sep 22 23:21:38.104 INFO [2]R dirty: [false, false]
21704 Sep 22 23:21:38.104 INFO Max found gen is 1
21705 Sep 22 23:21:38.104 INFO Generation requested: 1 >= found:1
21706 Sep 22 23:21:38.104 INFO Next flush: 1
21707 Sep 22 23:21:38.104 INFO All extents match
21708 Sep 22 23:21:38.104 INFO Created new region file "/tmp/downstairs-wG6GScam/region.json"
21709 Sep 22 23:21:38.104 INFO No downstairs repair required
21710 Sep 22 23:21:38.104 INFO No initial repair work was required
21711 Sep 22 23:21:38.104 INFO Set Downstairs and Upstairs active
21712 Sep 22 23:21:38.104 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea is now active with session: 77674847-2bf1-45c2-9d96-dadd8139fd25
21713 Sep 22 23:21:38.104 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea Set Active after no repair
21714 Sep 22 23:21:38.104 INFO Notify all downstairs, region set compare is done.
21715 Sep 22 23:21:38.104 INFO Set check for repair
21716 Sep 22 23:21:38.104 INFO [1] 127.0.0.1:44218 task reports connection:true
21717 Sep 22 23:21:38.104 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea Active Active Active
21718 Sep 22 23:21:38.104 INFO Set check for repair
21719 Sep 22 23:21:38.104 INFO [2] 127.0.0.1:52819 task reports connection:true
21720 Sep 22 23:21:38.104 INFO 826051a1-df7d-43b5-adbe-a9bb32b9d0ea Active Active Active
21721 Sep 22 23:21:38.104 INFO Set check for repair
21722 Sep 22 23:21:38.105 INFO [0] received reconcile message
21723 Sep 22 23:21:38.105 INFO [0] All repairs completed, exit
21724 Sep 22 23:21:38.105 INFO [0] Starts cmd_loop
21725 Sep 22 23:21:38.105 DEBG [1] Read AckReady 1005, : downstairs
21726 Sep 22 23:21:38.105 INFO [1] received reconcile message
21727 Sep 22 23:21:38.105 INFO [1] All repairs completed, exit
21728 Sep 22 23:21:38.105 INFO [1] Starts cmd_loop
21729 Sep 22 23:21:38.105 INFO [2] received reconcile message
21730 Sep 22 23:21:38.105 INFO [2] All repairs completed, exit
21731 Sep 22 23:21:38.105 INFO [2] Starts cmd_loop
21732 The guest has finished waiting for activation
21733 Sep 22 23:21:38.105 DEBG IO Write 1000 has deps []
21734 Sep 22 23:21:38.106 DEBG up_ds_listen was notified
21735 Sep 22 23:21:38.106 DEBG up_ds_listen process 1000
21736 Sep 22 23:21:38.106 DEBG [A] ack job 1000:1, : downstairs
21737 Sep 22 23:21:38.106 DEBG up_ds_listen checked 1 jobs, back to waiting
21738 Sep 22 23:21:38.106 DEBG [0] Read already AckReady 1005, : downstairs
21739 Sep 22 23:21:38.106 DEBG IO Write 1000 has deps []
21740 Sep 22 23:21:38.107 DEBG [2] Read already AckReady 1005, : downstairs
21741 Sep 22 23:21:38.107 DEBG Write :1000 deps:[] res:true
21742 Sep 22 23:21:38.107 DEBG up_ds_listen was notified
21743 Sep 22 23:21:38.107 DEBG up_ds_listen process 1005
21744 Sep 22 23:21:38.107 DEBG [A] ack job 1005:6, : downstairs
21745 Sep 22 23:21:38.107 INFO current number of open files limit 65536 is already the maximum
21746 Sep 22 23:21:38.107 INFO Opened existing region file "/tmp/downstairs-wG6GScam/region.json"
21747 Sep 22 23:21:38.107 DEBG up_ds_listen checked 1 jobs, back to waiting
21748 Sep 22 23:21:38.107 INFO Database read version 1
21749 Sep 22 23:21:38.107 INFO Database write version 1
21750 Sep 22 23:21:38.107 DEBG Write :1000 deps:[] res:true
21751 Sep 22 23:21:38.108 DEBG Write :1000 deps:[] res:true
21752 Sep 22 23:21:38.110 INFO UUID: b8e5d869-788a-45ac-ae38-e8f5cf16b888
21753 Sep 22 23:21:38.110 INFO Blocks per extent:5 Total Extents: 2
21754 Sep 22 23:21:38.110 INFO Crucible Version: Crucible Version: 0.0.1
21755 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21756 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21757 rustc: 1.70.0 stable x86_64-unknown-illumos
21758 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21759 Sep 22 23:21:38.110 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21760 Sep 22 23:21:38.110 INFO Using address: 127.0.0.1:44178, task: main
21761 Sep 22 23:21:38.110 INFO Repair listens on 127.0.0.1:0, task: repair
21762 Sep 22 23:21:38.110 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48399, task: repair
21763 Sep 22 23:21:38.110 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48399, task: repair
21764 Sep 22 23:21:38.110 DEBG IO Write 1001 has deps [JobId(1000)]
21765 Sep 22 23:21:38.110 INFO listening, local_addr: 127.0.0.1:48399, task: repair
21766 Sep 22 23:21:38.111 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48399, task: repair
21767 Sep 22 23:21:38.111 INFO Using repair address: 127.0.0.1:48399, task: main
21768 Sep 22 23:21:38.111 INFO No SSL acceptor configured, task: main
21769 Sep 22 23:21:38.111 INFO current number of open files limit 65536 is already the maximum
21770 Sep 22 23:21:38.111 INFO Created new region file "/tmp/downstairs-VNELpEv2/region.json"
21771 Sep 22 23:21:38.112 DEBG up_ds_listen was notified
21772 Sep 22 23:21:38.112 DEBG up_ds_listen process 1000
21773 Sep 22 23:21:38.112 DEBG [A] ack job 1000:1, : downstairs
21774 Sep 22 23:21:38.112 DEBG up_ds_listen checked 1 jobs, back to waiting
21775 Sep 22 23:21:38.112 DEBG IO Read 1001 has deps [JobId(1000)]
21776 Sep 22 23:21:38.113 DEBG Read :1001 deps:[JobId(1000)] res:true
21777 Sep 22 23:21:38.113 DEBG Read :1001 deps:[JobId(1000)] res:true
21778 test test::integration_test_volume_subvols_parent_scrub_sparse_2 ... ok
21779 Sep 22 23:21:38.114 INFO current number of open files limit 65536 is already the maximum
21780 Sep 22 23:21:38.114 DEBG Read :1001 deps:[JobId(1000)] res:true
21781 Sep 22 23:21:38.114 INFO Created new region file "/tmp/downstairs-2f4SSbKQ/region.json"
21782 Sep 22 23:21:38.115 INFO current number of open files limit 65536 is already the maximum
21783 Sep 22 23:21:38.115 INFO Opened existing region file "/tmp/downstairs-VNELpEv2/region.json"
21784 Sep 22 23:21:38.115 INFO Database read version 1
21785 Sep 22 23:21:38.115 INFO Database write version 1
21786 Sep 22 23:21:38.116 INFO UUID: 8025f645-3aa5-482f-9a7b-5b38782d7449
21787 Sep 22 23:21:38.116 INFO Blocks per extent:5 Total Extents: 2
21788 Sep 22 23:21:38.116 INFO Crucible Version: Crucible Version: 0.0.1
21789 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21790 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21791 rustc: 1.70.0 stable x86_64-unknown-illumos
21792 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21793 Sep 22 23:21:38.116 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21794 Sep 22 23:21:38.116 INFO Using address: 127.0.0.1:53578, task: main
21795 Sep 22 23:21:38.116 DEBG up_ds_listen was notified
21796 Sep 22 23:21:38.116 DEBG up_ds_listen process 1001
21797 Sep 22 23:21:38.116 DEBG [A] ack job 1001:2, : downstairs
21798 Sep 22 23:21:38.116 INFO Repair listens on 127.0.0.1:0, task: repair
21799 Sep 22 23:21:38.116 DEBG up_ds_listen checked 1 jobs, back to waiting
21800 Sep 22 23:21:38.116 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65488, task: repair
21801 Sep 22 23:21:38.116 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65488, task: repair
21802 Sep 22 23:21:38.116 INFO listening, local_addr: 127.0.0.1:65488, task: repair
21803 Sep 22 23:21:38.117 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65488, task: repair
21804 Sep 22 23:21:38.117 INFO Using repair address: 127.0.0.1:65488, task: main
21805 Sep 22 23:21:38.117 INFO No SSL acceptor configured, task: main
21806 Sep 22 23:21:38.117 DEBG IO Read 1002 has deps [JobId(1001), JobId(1000)]
21807 Sep 22 23:21:38.117 INFO current number of open files limit 65536 is already the maximum
21808 Sep 22 23:21:38.117 INFO Created new region file "/tmp/downstairs-H08UzdlI/region.json"
21809 Sep 22 23:21:38.117 INFO current number of open files limit 65536 is already the maximum
21810 Sep 22 23:21:38.117 INFO Opened existing region file "/tmp/downstairs-2f4SSbKQ/region.json"
21811 Sep 22 23:21:38.117 INFO Database read version 1
21812 Sep 22 23:21:38.117 INFO Database write version 1
21813 Sep 22 23:21:38.117 DEBG [0] Read AckReady 1001, : downstairs
21814 Sep 22 23:21:38.117 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
21815 Sep 22 23:21:38.118 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
21816 Sep 22 23:21:38.118 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
21817 Sep 22 23:21:38.119 DEBG [1] Read already AckReady 1001, : downstairs
21818 Sep 22 23:21:38.119 INFO UUID: 71f89d71-fca2-4667-8b12-c125e7772d19
21819 Sep 22 23:21:38.119 INFO Blocks per extent:5 Total Extents: 2
21820 Sep 22 23:21:38.119 INFO Crucible Version: Crucible Version: 0.0.1
21821 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21822 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21823 rustc: 1.70.0 stable x86_64-unknown-illumos
21824 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21825 Sep 22 23:21:38.119 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21826 Sep 22 23:21:38.119 INFO Using address: 127.0.0.1:51219, task: main
21827 Sep 22 23:21:38.119 INFO Repair listens on 127.0.0.1:0, task: repair
21828 Sep 22 23:21:38.119 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57315, task: repair
21829 Sep 22 23:21:38.119 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57315, task: repair
21830 Sep 22 23:21:38.119 INFO listening, local_addr: 127.0.0.1:57315, task: repair
21831 Sep 22 23:21:38.120 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57315, task: repair
21832 Sep 22 23:21:38.120 INFO Using repair address: 127.0.0.1:57315, task: main
21833 Sep 22 23:21:38.120 INFO No SSL acceptor configured, task: main
21834 Sep 22 23:21:38.120 INFO current number of open files limit 65536 is already the maximum
21835 Sep 22 23:21:38.120 INFO Created new region file "/tmp/downstairs-mVZN7GWg/region.json"
21836 Sep 22 23:21:38.121 DEBG [2] Read already AckReady 1001, : downstairs
21837 Sep 22 23:21:38.121 DEBG up_ds_listen was notified
21838 Sep 22 23:21:38.121 DEBG up_ds_listen process 1001
21839 Sep 22 23:21:38.121 DEBG [A] ack job 1001:2, : downstairs
21840 Sep 22 23:21:38.121 INFO current number of open files limit 65536 is already the maximum
21841 Sep 22 23:21:38.121 INFO Opened existing region file "/tmp/downstairs-H08UzdlI/region.json"
21842 Sep 22 23:21:38.121 INFO Database read version 1
21843 Sep 22 23:21:38.121 INFO Database write version 1
21844 Sep 22 23:21:38.121 DEBG up_ds_listen checked 1 jobs, back to waiting
21845 Sep 22 23:21:38.122 DEBG [0] Read AckReady 1002, : downstairs
21846 Sep 22 23:21:38.123 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
21847 Sep 22 23:21:38.124 INFO UUID: 481d7429-1224-42e4-a2f2-287bd0cfeaaf
21848 Sep 22 23:21:38.124 INFO Blocks per extent:5 Total Extents: 2
21849 Sep 22 23:21:38.124 INFO current number of open files limit 65536 is already the maximum
21850 Sep 22 23:21:38.124 INFO Opened existing region file "/tmp/downstairs-mVZN7GWg/region.json"
21851 Sep 22 23:21:38.124 INFO Crucible Version: Crucible Version: 0.0.1
21852 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21853 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21854 rustc: 1.70.0 stable x86_64-unknown-illumos
21855 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21856 Sep 22 23:21:38.124 INFO Database read version 1
21857 Sep 22 23:21:38.124 INFO Database write version 1
21858 Sep 22 23:21:38.124 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21859 Sep 22 23:21:38.124 INFO Using address: 127.0.0.1:58376, task: main
21860 Sep 22 23:21:38.124 INFO Repair listens on 127.0.0.1:0, task: repair
21861 Sep 22 23:21:38.124 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52690, task: repair
21862 Sep 22 23:21:38.124 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52690, task: repair
21863 Sep 22 23:21:38.124 INFO listening, local_addr: 127.0.0.1:52690, task: repair
21864 Sep 22 23:21:38.125 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52690, task: repair
21865 Sep 22 23:21:38.125 INFO Using repair address: 127.0.0.1:52690, task: main
21866 Sep 22 23:21:38.125 INFO No SSL acceptor configured, task: main
21867 Sep 22 23:21:38.125 DEBG [1] Read already AckReady 1002, : downstairs
21868 Sep 22 23:21:38.125 INFO current number of open files limit 65536 is already the maximum
21869 Sep 22 23:21:38.125 INFO Created new region file "/tmp/downstairs-L6iP4aDB/region.json"
21870 Sep 22 23:21:38.125 DEBG up_ds_listen was notified
21871 Sep 22 23:21:38.125 DEBG up_ds_listen process 1002
21872 Sep 22 23:21:38.125 DEBG [A] ack job 1002:3, : downstairs
21873 Sep 22 23:21:38.125 DEBG up_ds_listen checked 1 jobs, back to waiting
21874 Sep 22 23:21:38.126 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
21875 Sep 22 23:21:38.126 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21876 Sep 22 23:21:38.126 INFO UUID: 7c9decb7-dee6-4a23-a2b7-7860a60db1d6
21877 Sep 22 23:21:38.126 INFO Blocks per extent:5 Total Extents: 2
21878 Sep 22 23:21:38.127 INFO Crucible Version: Crucible Version: 0.0.1
21879 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21880 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21881 rustc: 1.70.0 stable x86_64-unknown-illumos
21882 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21883 Sep 22 23:21:38.127 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21884 Sep 22 23:21:38.127 INFO Using address: 127.0.0.1:44324, task: main
21885 Sep 22 23:21:38.127 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21886 Sep 22 23:21:38.127 INFO Repair listens on 127.0.0.1:0, task: repair
21887 Sep 22 23:21:38.127 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39540, task: repair
21888 Sep 22 23:21:38.127 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39540, task: repair
21889 Sep 22 23:21:38.127 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21890 Sep 22 23:21:38.127 INFO listening, local_addr: 127.0.0.1:39540, task: repair
21891 Sep 22 23:21:38.127 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39540, task: repair
21892 Sep 22 23:21:38.127 INFO Using repair address: 127.0.0.1:39540, task: main
21893 Sep 22 23:21:38.127 INFO No SSL acceptor configured, task: main
21894 Sep 22 23:21:38.127 DEBG [2] Read already AckReady 1002, : downstairs
21895 Sep 22 23:21:38.127 DEBG up_ds_listen was notified
21896 Sep 22 23:21:38.127 DEBG up_ds_listen process 1002
21897 Sep 22 23:21:38.127 DEBG [A] ack job 1002:3, : downstairs
21898 Sep 22 23:21:38.128 INFO current number of open files limit 65536 is already the maximum
21899 Sep 22 23:21:38.128 INFO Created new region file "/tmp/downstairs-a3HQe4PA/region.json"
21900 Sep 22 23:21:38.128 DEBG up_ds_listen checked 1 jobs, back to waiting
21901 Sep 22 23:21:38.130 DEBG [0] Read AckReady 1003, : downstairs
21902 Sep 22 23:21:38.130 INFO current number of open files limit 65536 is already the maximum
21903 Sep 22 23:21:38.130 INFO Opened existing region file "/tmp/downstairs-L6iP4aDB/region.json"
21904 Sep 22 23:21:38.130 INFO Database read version 1
21905 Sep 22 23:21:38.130 INFO Database write version 1
21906 Sep 22 23:21:38.132 DEBG [1] Read already AckReady 1003, : downstairs
21907 test test::integration_test_volume_write_unwritten_sparse ... ok
21908 Sep 22 23:21:38.132 INFO current number of open files limit 65536 is already the maximum
21909 Sep 22 23:21:38.132 INFO Opened existing region file "/tmp/downstairs-a3HQe4PA/region.json"
21910 Sep 22 23:21:38.132 INFO Database read version 1
21911 Sep 22 23:21:38.132 INFO UUID: f6750c55-7697-440f-81c9-8c2e90dcc4ce
21912 Sep 22 23:21:38.132 INFO Database write version 1
21913 Sep 22 23:21:38.132 INFO Blocks per extent:5 Total Extents: 2
21914 Sep 22 23:21:38.132 INFO Crucible Version: Crucible Version: 0.0.1
21915 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21916 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21917 rustc: 1.70.0 stable x86_64-unknown-illumos
21918 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21919 Sep 22 23:21:38.133 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21920 Sep 22 23:21:38.133 INFO current number of open files limit 65536 is already the maximum
21921 Sep 22 23:21:38.133 INFO Using address: 127.0.0.1:47267, task: main
21922 Sep 22 23:21:38.133 INFO Created new region file "/tmp/downstairs-cjWQt5F2/region.json"
21923 Sep 22 23:21:38.133 INFO Repair listens on 127.0.0.1:0, task: repair
21924 Sep 22 23:21:38.133 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39855, task: repair
21925 Sep 22 23:21:38.133 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39855, task: repair
21926 Sep 22 23:21:38.133 INFO listening, local_addr: 127.0.0.1:39855, task: repair
21927 Sep 22 23:21:38.133 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39855, task: repair
21928 Sep 22 23:21:38.133 INFO Using repair address: 127.0.0.1:39855, task: main
21929 Sep 22 23:21:38.133 INFO No SSL acceptor configured, task: main
21930 Sep 22 23:21:38.133 DEBG [2] Read already AckReady 1003, : downstairs
21931 Sep 22 23:21:38.133 DEBG up_ds_listen was notified
21932 Sep 22 23:21:38.133 DEBG up_ds_listen process 1003
21933 Sep 22 23:21:38.133 DEBG [A] ack job 1003:4, : downstairs
21934 Sep 22 23:21:38.134 INFO current number of open files limit 65536 is already the maximum
21935 Sep 22 23:21:38.134 INFO Created new region file "/tmp/downstairs-qbyafYjG/region.json"
21936 Sep 22 23:21:38.134 DEBG up_ds_listen checked 1 jobs, back to waiting
21937 Sep 22 23:21:38.134 INFO UUID: 6e4f126c-20d7-47c5-9d94-936338c4e38d
21938 Sep 22 23:21:38.135 INFO Blocks per extent:5 Total Extents: 2
21939 Sep 22 23:21:38.135 INFO Crucible Version: Crucible Version: 0.0.1
21940 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21941 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21942 rustc: 1.70.0 stable x86_64-unknown-illumos
21943 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21944 Sep 22 23:21:38.135 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21945 Sep 22 23:21:38.135 INFO Using address: 127.0.0.1:52939, task: main
21946 Sep 22 23:21:38.135 INFO Repair listens on 127.0.0.1:0, task: repair
21947 Sep 22 23:21:38.135 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53370, task: repair
21948 Sep 22 23:21:38.135 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53370, task: repair
21949 Sep 22 23:21:38.135 INFO listening, local_addr: 127.0.0.1:53370, task: repair
21950 Sep 22 23:21:38.135 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53370, task: repair
21951 Sep 22 23:21:38.136 INFO Using repair address: 127.0.0.1:53370, task: main
21952 Sep 22 23:21:38.136 INFO No SSL acceptor configured, task: main
21953 Sep 22 23:21:38.136 INFO current number of open files limit 65536 is already the maximum
21954 Sep 22 23:21:38.136 INFO Created new region file "/tmp/downstairs-Rn8xeVoQ/region.json"
21955 Sep 22 23:21:38.137 INFO current number of open files limit 65536 is already the maximum
21956 Sep 22 23:21:38.137 INFO Opened existing region file "/tmp/downstairs-cjWQt5F2/region.json"
21957 Sep 22 23:21:38.137 INFO Database read version 1
21958 Sep 22 23:21:38.137 INFO Database write version 1
21959 test test::integration_test_volume_write_unwritten_2 ... ok
21960 Sep 22 23:21:38.139 INFO current number of open files limit 65536 is already the maximum
21961 Sep 22 23:21:38.139 INFO Created new region file "/tmp/downstairs-xyUXgpCG/region.json"
21962 Sep 22 23:21:38.139 INFO UUID: 6a3090e7-a0f2-4a80-947f-1db172a31a5f
21963 Sep 22 23:21:38.139 INFO Blocks per extent:5 Total Extents: 2
21964 Sep 22 23:21:38.139 INFO Crucible Version: Crucible Version: 0.0.1
21965 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21966 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21967 rustc: 1.70.0 stable x86_64-unknown-illumos
21968 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21969 Sep 22 23:21:38.139 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21970 Sep 22 23:21:38.139 INFO Using address: 127.0.0.1:62388, task: main
21971 Sep 22 23:21:38.140 INFO Repair listens on 127.0.0.1:0, task: repair
21972 Sep 22 23:21:38.140 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33943, task: repair
21973 Sep 22 23:21:38.140 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33943, task: repair
21974 Sep 22 23:21:38.140 INFO listening, local_addr: 127.0.0.1:33943, task: repair
21975 Sep 22 23:21:38.140 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33943, task: repair
21976 Sep 22 23:21:38.140 INFO Using repair address: 127.0.0.1:33943, task: main
21977 Sep 22 23:21:38.140 INFO No SSL acceptor configured, task: main
21978 Sep 22 23:21:38.140 INFO current number of open files limit 65536 is already the maximum
21979 Sep 22 23:21:38.140 INFO Opened existing region file "/tmp/downstairs-qbyafYjG/region.json"
21980 Sep 22 23:21:38.140 INFO Database read version 1
21981 Sep 22 23:21:38.140 INFO Database write version 1
21982 Sep 22 23:21:38.141 INFO current number of open files limit 65536 is already the maximum
21983 Sep 22 23:21:38.141 INFO Created new region file "/tmp/downstairs-NfMGr4cT/region.json"
21984 Sep 22 23:21:38.142 INFO current number of open files limit 65536 is already the maximum
21985 Sep 22 23:21:38.142 INFO Opened existing region file "/tmp/downstairs-Rn8xeVoQ/region.json"
21986 Sep 22 23:21:38.142 INFO Database read version 1
21987 Sep 22 23:21:38.142 INFO Database write version 1
21988 Sep 22 23:21:38.143 INFO current number of open files limit 65536 is already the maximum
21989 Sep 22 23:21:38.143 INFO Opened existing region file "/tmp/downstairs-xyUXgpCG/region.json"
21990 Sep 22 23:21:38.143 INFO Database read version 1
21991 Sep 22 23:21:38.143 INFO Database write version 1
21992 Sep 22 23:21:38.145 INFO UUID: 4bd08688-0a92-4b76-9dcc-9afb5f6f7cc8
21993 Sep 22 23:21:38.145 INFO Blocks per extent:5 Total Extents: 2
21994 Sep 22 23:21:38.145 INFO Crucible Version: Crucible Version: 0.0.1
21995 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21996 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21997 rustc: 1.70.0 stable x86_64-unknown-illumos
21998 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21999 Sep 22 23:21:38.145 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22000 Sep 22 23:21:38.145 INFO Using address: 127.0.0.1:54868, task: main
22001 Sep 22 23:21:38.146 INFO Repair listens on 127.0.0.1:0, task: repair
22002 Sep 22 23:21:38.146 INFO UUID: 0d1968cb-b30f-4a44-a680-9f3b52cfb45a
22003 Sep 22 23:21:38.146 INFO Blocks per extent:5 Total Extents: 2
22004 Sep 22 23:21:38.146 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60848, task: repair
22005 Sep 22 23:21:38.146 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60848, task: repair
22006 Sep 22 23:21:38.146 INFO Crucible Version: Crucible Version: 0.0.1
22007 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22008 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22009 rustc: 1.70.0 stable x86_64-unknown-illumos
22010 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22011 Sep 22 23:21:38.146 INFO listening, local_addr: 127.0.0.1:60848, task: repair
22012 Sep 22 23:21:38.146 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22013 Sep 22 23:21:38.146 INFO Using address: 127.0.0.1:48752, task: main
22014 Sep 22 23:21:38.146 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60848, task: repair
22015 Sep 22 23:21:38.146 INFO UUID: 07ed70f5-1d11-4ea6-9c11-5ff16ec9fcb3
22016 Sep 22 23:21:38.146 INFO Blocks per extent:5 Total Extents: 2
22017 Sep 22 23:21:38.146 INFO Using repair address: 127.0.0.1:60848, task: main
22018 Sep 22 23:21:38.146 INFO No SSL acceptor configured, task: main
22019 Sep 22 23:21:38.146 INFO Crucible Version: Crucible Version: 0.0.1
22020 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22021 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22022 rustc: 1.70.0 stable x86_64-unknown-illumos
22023 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22024 Sep 22 23:21:38.146 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22025 Sep 22 23:21:38.146 INFO Using address: 127.0.0.1:39369, task: main
22026 Sep 22 23:21:38.146 INFO Repair listens on 127.0.0.1:0, task: repair
22027 Sep 22 23:21:38.146 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56982, task: repair
22028 Sep 22 23:21:38.147 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56982, task: repair
22029 Sep 22 23:21:38.147 INFO listening, local_addr: 127.0.0.1:56982, task: repair
22030 Sep 22 23:21:38.147 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56982, task: repair
22031 Sep 22 23:21:38.147 INFO Repair listens on 127.0.0.1:0, task: repair
22032 Sep 22 23:21:38.147 INFO Using repair address: 127.0.0.1:56982, task: main
22033 Sep 22 23:21:38.147 INFO No SSL acceptor configured, task: main
22034 Sep 22 23:21:38.147 INFO current number of open files limit 65536 is already the maximum
22035 Sep 22 23:21:38.147 INFO Opened existing region file "/tmp/downstairs-NfMGr4cT/region.json"
22036 Sep 22 23:21:38.147 INFO Database read version 1
22037 Sep 22 23:21:38.147 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38332, task: repair
22038 Sep 22 23:21:38.147 INFO Database write version 1
22039 Sep 22 23:21:38.147 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38332, task: repair
22040 Sep 22 23:21:38.147 INFO listening, local_addr: 127.0.0.1:38332, task: repair
22041 Sep 22 23:21:38.147 INFO Upstairs starts
22042 Sep 22 23:21:38.147 INFO current number of open files limit 65536 is already the maximum
22043 Sep 22 23:21:38.147 INFO Crucible Version: BuildInfo {
22044 version: "0.0.1",
22045 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22046 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22047 git_branch: "main",
22048 rustc_semver: "1.70.0",
22049 rustc_channel: "stable",
22050 rustc_host_triple: "x86_64-unknown-illumos",
22051 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22052 cargo_triple: "x86_64-unknown-illumos",
22053 debug: true,
22054 opt_level: 0,
22055 }
22056 Sep 22 23:21:38.147 INFO Upstairs <-> Downstairs Message Version: 4
22057 Sep 22 23:21:38.147 INFO Crucible stats registered with UUID: 5e3310b1-e4cc-48c6-92c2-40a58c307dae
22058 Sep 22 23:21:38.147 INFO Created new region file "/tmp/downstairs-dJ35WBCH/region.json"
22059 Sep 22 23:21:38.147 INFO Crucible 5e3310b1-e4cc-48c6-92c2-40a58c307dae has session id: d0d5375e-5500-463d-ad8f-46e4f7ad8e6d
22060 Sep 22 23:21:38.147 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38332, task: repair
22061 Sep 22 23:21:38.147 INFO Using repair address: 127.0.0.1:38332, task: main
22062 Sep 22 23:21:38.147 INFO No SSL acceptor configured, task: main
22063 Sep 22 23:21:38.147 INFO listening on 127.0.0.1:0, task: main
22064 Sep 22 23:21:38.147 INFO listening on 127.0.0.1:0, task: main
22065 Sep 22 23:21:38.147 INFO listening on 127.0.0.1:0, task: main
22066 Sep 22 23:21:38.147 INFO listening on 127.0.0.1:0, task: main
22067 Sep 22 23:21:38.147 INFO listening on 127.0.0.1:0, task: main
22068 Sep 22 23:21:38.147 INFO listening on 127.0.0.1:0, task: main
22069 Sep 22 23:21:38.148 INFO [0] connecting to 127.0.0.1:49227, looper: 0
22070 Sep 22 23:21:38.148 INFO [1] connecting to 127.0.0.1:44178, looper: 1
22071 Sep 22 23:21:38.148 INFO current number of open files limit 65536 is already the maximum
22072 Sep 22 23:21:38.148 INFO [2] connecting to 127.0.0.1:53578, looper: 2
22073 Sep 22 23:21:38.148 INFO Created new region file "/tmp/downstairs-W0JvBvQ4/region.json"
22074 Sep 22 23:21:38.148 INFO up_listen starts, task: up_listen
22075 Sep 22 23:21:38.148 INFO Wait for all three downstairs to come online
22076 Sep 22 23:21:38.148 INFO Flush timeout: 0.5
22077 Sep 22 23:21:38.148 INFO accepted connection from 127.0.0.1:52966, task: main
22078 Sep 22 23:21:38.148 INFO accepted connection from 127.0.0.1:45823, task: main
22079 Sep 22 23:21:38.148 INFO accepted connection from 127.0.0.1:33967, task: main
22080 Sep 22 23:21:38.148 INFO [0] 5e3310b1-e4cc-48c6-92c2-40a58c307dae looper connected, looper: 0
22081 Sep 22 23:21:38.148 INFO [0] Proc runs for 127.0.0.1:49227 in state New
22082 Sep 22 23:21:38.148 INFO [1] 5e3310b1-e4cc-48c6-92c2-40a58c307dae looper connected, looper: 1
22083 Sep 22 23:21:38.149 INFO [1] Proc runs for 127.0.0.1:44178 in state New
22084 Sep 22 23:21:38.149 INFO [2] 5e3310b1-e4cc-48c6-92c2-40a58c307dae looper connected, looper: 2
22085 Sep 22 23:21:38.149 INFO [2] Proc runs for 127.0.0.1:53578 in state New
22086 Sep 22 23:21:38.149 INFO Upstairs starts
22087 Sep 22 23:21:38.149 INFO Crucible Version: BuildInfo {
22088 version: "0.0.1",
22089 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22090 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22091 git_branch: "main",
22092 rustc_semver: "1.70.0",
22093 rustc_channel: "stable",
22094 rustc_host_triple: "x86_64-unknown-illumos",
22095 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22096 cargo_triple: "x86_64-unknown-illumos",
22097 debug: true,
22098 opt_level: 0,
22099 }
22100 Sep 22 23:21:38.149 INFO Upstairs <-> Downstairs Message Version: 4
22101 Sep 22 23:21:38.149 INFO Crucible stats registered with UUID: e89b9f37-20f1-48ba-8434-b618ba5c23aa
22102 Sep 22 23:21:38.149 INFO Crucible e89b9f37-20f1-48ba-8434-b618ba5c23aa has session id: f7efcb29-a668-43c1-b069-9cfda4629be8
22103 Sep 22 23:21:38.149 INFO UUID: ab814286-bbb3-4f61-a468-0a3953430633
22104 Sep 22 23:21:38.149 INFO Blocks per extent:5 Total Extents: 2
22105 Sep 22 23:21:38.149 INFO Crucible Version: Crucible Version: 0.0.1
22106 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22107 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22108 rustc: 1.70.0 stable x86_64-unknown-illumos
22109 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22110 Sep 22 23:21:38.149 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22111 Sep 22 23:21:38.149 INFO Using address: 127.0.0.1:43611, task: main
22112 Sep 22 23:21:38.149 INFO Connection request from 5e3310b1-e4cc-48c6-92c2-40a58c307dae with version 4, task: proc
22113 Sep 22 23:21:38.149 INFO upstairs UpstairsConnection { upstairs_id: 5e3310b1-e4cc-48c6-92c2-40a58c307dae, session_id: 2b11eab5-bc61-44bd-8216-a5695656c94c, gen: 1 } connected, version 4, task: proc
22114 Sep 22 23:21:38.149 INFO Connection request from 5e3310b1-e4cc-48c6-92c2-40a58c307dae with version 4, task: proc
22115 Sep 22 23:21:38.149 INFO upstairs UpstairsConnection { upstairs_id: 5e3310b1-e4cc-48c6-92c2-40a58c307dae, session_id: 2b11eab5-bc61-44bd-8216-a5695656c94c, gen: 1 } connected, version 4, task: proc
22116 Sep 22 23:21:38.150 INFO Connection request from 5e3310b1-e4cc-48c6-92c2-40a58c307dae with version 4, task: proc
22117 Sep 22 23:21:38.150 INFO Repair listens on 127.0.0.1:0, task: repair
22118 Sep 22 23:21:38.150 INFO upstairs UpstairsConnection { upstairs_id: 5e3310b1-e4cc-48c6-92c2-40a58c307dae, session_id: 2b11eab5-bc61-44bd-8216-a5695656c94c, gen: 1 } connected, version 4, task: proc
22119 Sep 22 23:21:38.150 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60439, task: repair
22120 Sep 22 23:21:38.150 INFO [0] connecting to 127.0.0.1:58376, looper: 0
22121 Sep 22 23:21:38.150 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60439, task: repair
22122 Sep 22 23:21:38.150 INFO listening, local_addr: 127.0.0.1:60439, task: repair
22123 Sep 22 23:21:38.150 INFO [1] connecting to 127.0.0.1:47267, looper: 1
22124 Sep 22 23:21:38.150 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60439, task: repair
22125 Sep 22 23:21:38.150 INFO Using repair address: 127.0.0.1:60439, task: main
22126 Sep 22 23:21:38.150 INFO [2] connecting to 127.0.0.1:54868, looper: 2
22127 Sep 22 23:21:38.150 INFO No SSL acceptor configured, task: main
22128 Sep 22 23:21:38.150 INFO up_listen starts, task: up_listen
22129 Sep 22 23:21:38.150 INFO Wait for all three downstairs to come online
22130 Sep 22 23:21:38.150 INFO Flush timeout: 0.5
22131 Sep 22 23:21:38.150 INFO accepted connection from 127.0.0.1:32910, task: main
22132 Sep 22 23:21:38.150 INFO accepted connection from 127.0.0.1:42257, task: main
22133 Sep 22 23:21:38.150 INFO current number of open files limit 65536 is already the maximum
22134 Sep 22 23:21:38.150 INFO [0] 5e3310b1-e4cc-48c6-92c2-40a58c307dae (2b11eab5-bc61-44bd-8216-a5695656c94c) New New New ds_transition to WaitActive
22135 Sep 22 23:21:38.151 INFO [0] Transition from New to WaitActive
22136 Sep 22 23:21:38.151 INFO Created new region file "/tmp/downstairs-IDeF0yuc/region.json"
22137 Sep 22 23:21:38.151 INFO accepted connection from 127.0.0.1:42120, task: main
22138 Sep 22 23:21:38.151 INFO [1] 5e3310b1-e4cc-48c6-92c2-40a58c307dae (2b11eab5-bc61-44bd-8216-a5695656c94c) WaitActive New New ds_transition to WaitActive
22139 Sep 22 23:21:38.151 INFO [1] Transition from New to WaitActive
22140 Sep 22 23:21:38.151 INFO [2] 5e3310b1-e4cc-48c6-92c2-40a58c307dae (2b11eab5-bc61-44bd-8216-a5695656c94c) WaitActive WaitActive New ds_transition to WaitActive
22141 Sep 22 23:21:38.151 INFO [2] Transition from New to WaitActive
22142 Sep 22 23:21:38.151 INFO [0] e89b9f37-20f1-48ba-8434-b618ba5c23aa looper connected, looper: 0
22143 Sep 22 23:21:38.151 INFO [0] Proc runs for 127.0.0.1:58376 in state New
22144 Sep 22 23:21:38.151 INFO [1] e89b9f37-20f1-48ba-8434-b618ba5c23aa looper connected, looper: 1
22145 Sep 22 23:21:38.151 INFO [1] Proc runs for 127.0.0.1:47267 in state New
22146 Sep 22 23:21:38.151 INFO [2] e89b9f37-20f1-48ba-8434-b618ba5c23aa looper connected, looper: 2
22147 Sep 22 23:21:38.151 INFO [2] Proc runs for 127.0.0.1:54868 in state New
22148 Sep 22 23:21:38.152 INFO Connection request from e89b9f37-20f1-48ba-8434-b618ba5c23aa with version 4, task: proc
22149 Sep 22 23:21:38.152 INFO upstairs UpstairsConnection { upstairs_id: e89b9f37-20f1-48ba-8434-b618ba5c23aa, session_id: 902609d5-e2d5-4264-985b-fc9832ca6874, gen: 1 } connected, version 4, task: proc
22150 Sep 22 23:21:38.152 INFO Connection request from e89b9f37-20f1-48ba-8434-b618ba5c23aa with version 4, task: proc
22151 Sep 22 23:21:38.152 INFO upstairs UpstairsConnection { upstairs_id: e89b9f37-20f1-48ba-8434-b618ba5c23aa, session_id: 902609d5-e2d5-4264-985b-fc9832ca6874, gen: 1 } connected, version 4, task: proc
22152 Sep 22 23:21:38.152 INFO Connection request from e89b9f37-20f1-48ba-8434-b618ba5c23aa with version 4, task: proc
22153 Sep 22 23:21:38.152 INFO upstairs UpstairsConnection { upstairs_id: e89b9f37-20f1-48ba-8434-b618ba5c23aa, session_id: 902609d5-e2d5-4264-985b-fc9832ca6874, gen: 1 } connected, version 4, task: proc
22154 Sep 22 23:21:38.152 INFO [0] e89b9f37-20f1-48ba-8434-b618ba5c23aa (902609d5-e2d5-4264-985b-fc9832ca6874) New New New ds_transition to WaitActive
22155 Sep 22 23:21:38.152 INFO [0] Transition from New to WaitActive
22156 Sep 22 23:21:38.152 INFO [1] e89b9f37-20f1-48ba-8434-b618ba5c23aa (902609d5-e2d5-4264-985b-fc9832ca6874) WaitActive New New ds_transition to WaitActive
22157 Sep 22 23:21:38.152 INFO [1] Transition from New to WaitActive
22158 Sep 22 23:21:38.152 INFO [2] e89b9f37-20f1-48ba-8434-b618ba5c23aa (902609d5-e2d5-4264-985b-fc9832ca6874) WaitActive WaitActive New ds_transition to WaitActive
22159 Sep 22 23:21:38.152 INFO [2] Transition from New to WaitActive
22160 Sep 22 23:21:38.152 INFO current number of open files limit 65536 is already the maximum
22161 The guest has requested activation
22162 Sep 22 23:21:38.153 INFO Opened existing region file "/tmp/downstairs-W0JvBvQ4/region.json"
22163 Sep 22 23:21:38.153 INFO Database read version 1
22164 Sep 22 23:21:38.153 INFO Database write version 1
22165 Sep 22 23:21:38.153 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae active request set
22166 Sep 22 23:21:38.153 INFO [0] received activate with gen 1
22167 Sep 22 23:21:38.153 INFO [0] client got ds_active_rx, promote! session 2b11eab5-bc61-44bd-8216-a5695656c94c
22168 Sep 22 23:21:38.153 INFO [1] received activate with gen 1
22169 Sep 22 23:21:38.153 INFO [1] client got ds_active_rx, promote! session 2b11eab5-bc61-44bd-8216-a5695656c94c
22170 Sep 22 23:21:38.153 INFO [2] received activate with gen 1
22171 Sep 22 23:21:38.153 INFO [2] client got ds_active_rx, promote! session 2b11eab5-bc61-44bd-8216-a5695656c94c
22172 Sep 22 23:21:38.153 INFO UpstairsConnection { upstairs_id: 5e3310b1-e4cc-48c6-92c2-40a58c307dae, session_id: 2b11eab5-bc61-44bd-8216-a5695656c94c, gen: 1 } is now active (read-write)
22173 Sep 22 23:21:38.153 INFO UpstairsConnection { upstairs_id: 5e3310b1-e4cc-48c6-92c2-40a58c307dae, session_id: 2b11eab5-bc61-44bd-8216-a5695656c94c, gen: 1 } is now active (read-write)
22174 Sep 22 23:21:38.153 INFO UpstairsConnection { upstairs_id: 5e3310b1-e4cc-48c6-92c2-40a58c307dae, session_id: 2b11eab5-bc61-44bd-8216-a5695656c94c, gen: 1 } is now active (read-write)
22175 Sep 22 23:21:38.153 INFO current number of open files limit 65536 is already the maximum
22176 Sep 22 23:21:38.153 INFO Opened existing region file "/tmp/downstairs-dJ35WBCH/region.json"
22177 Sep 22 23:21:38.153 INFO Database read version 1
22178 Sep 22 23:21:38.153 INFO Database write version 1
22179 Sep 22 23:21:38.154 INFO [0] downstairs client at 127.0.0.1:49227 has UUID 1f4a8f3f-442f-484d-8ab0-408b22284518
22180 Sep 22 23:21:38.154 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1f4a8f3f-442f-484d-8ab0-408b22284518, encrypted: true, database_read_version: 1, database_write_version: 1 }
22181 Sep 22 23:21:38.154 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae WaitActive WaitActive WaitActive
22182 Sep 22 23:21:38.154 INFO [1] downstairs client at 127.0.0.1:44178 has UUID b8e5d869-788a-45ac-ae38-e8f5cf16b888
22183 Sep 22 23:21:38.154 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b8e5d869-788a-45ac-ae38-e8f5cf16b888, encrypted: true, database_read_version: 1, database_write_version: 1 }
22184 Sep 22 23:21:38.154 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae WaitActive WaitActive WaitActive
22185 Sep 22 23:21:38.154 INFO [2] downstairs client at 127.0.0.1:53578 has UUID 8025f645-3aa5-482f-9a7b-5b38782d7449
22186 Sep 22 23:21:38.154 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8025f645-3aa5-482f-9a7b-5b38782d7449, encrypted: true, database_read_version: 1, database_write_version: 1 }
22187 Sep 22 23:21:38.154 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae WaitActive WaitActive WaitActive
22188 Sep 22 23:21:38.154 INFO Current flush_numbers [0..12]: [0, 0]
22189 Sep 22 23:21:38.155 INFO Downstairs has completed Negotiation, task: proc
22190 Sep 22 23:21:38.155 INFO Current flush_numbers [0..12]: [0, 0]
22191 Sep 22 23:21:38.155 INFO Downstairs has completed Negotiation, task: proc
22192 Sep 22 23:21:38.155 INFO Current flush_numbers [0..12]: [0, 0]
22193 Sep 22 23:21:38.155 INFO UUID: fbc1ff7a-5e2f-4a0a-a554-498b43e005a3
22194 Sep 22 23:21:38.155 INFO Blocks per extent:5 Total Extents: 2
22195 Sep 22 23:21:38.155 INFO Crucible Version: Crucible Version: 0.0.1
22196 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22197 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22198 rustc: 1.70.0 stable x86_64-unknown-illumos
22199 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22200 Sep 22 23:21:38.155 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22201 Sep 22 23:21:38.155 INFO Using address: 127.0.0.1:55268, task: main
22202 Sep 22 23:21:38.155 INFO Downstairs has completed Negotiation, task: proc
22203 Sep 22 23:21:38.155 INFO [0] 5e3310b1-e4cc-48c6-92c2-40a58c307dae (2b11eab5-bc61-44bd-8216-a5695656c94c) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22204 Sep 22 23:21:38.155 INFO [0] Transition from WaitActive to WaitQuorum
22205 Sep 22 23:21:38.155 WARN [0] new RM replaced this: None
22206 Sep 22 23:21:38.155 INFO Repair listens on 127.0.0.1:0, task: repair
22207 Sep 22 23:21:38.155 INFO [0] Starts reconcile loop
22208 Sep 22 23:21:38.156 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50712, task: repair
22209 Sep 22 23:21:38.156 INFO [1] 5e3310b1-e4cc-48c6-92c2-40a58c307dae (2b11eab5-bc61-44bd-8216-a5695656c94c) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22210 Sep 22 23:21:38.156 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50712, task: repair
22211 Sep 22 23:21:38.156 INFO [1] Transition from WaitActive to WaitQuorum
22212 Sep 22 23:21:38.156 WARN [1] new RM replaced this: None
22213 Sep 22 23:21:38.156 INFO [1] Starts reconcile loop
22214 Sep 22 23:21:38.156 INFO listening, local_addr: 127.0.0.1:50712, task: repair
22215 Sep 22 23:21:38.156 INFO [2] 5e3310b1-e4cc-48c6-92c2-40a58c307dae (2b11eab5-bc61-44bd-8216-a5695656c94c) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22216 Sep 22 23:21:38.156 INFO [2] Transition from WaitActive to WaitQuorum
22217 Sep 22 23:21:38.156 WARN [2] new RM replaced this: None
22218 Sep 22 23:21:38.156 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50712, task: repair
22219 Sep 22 23:21:38.156 INFO [2] Starts reconcile loop
22220 Sep 22 23:21:38.156 INFO Using repair address: 127.0.0.1:50712, task: main
22221 Sep 22 23:21:38.156 INFO No SSL acceptor configured, task: main
22222 Sep 22 23:21:38.156 INFO [0] 127.0.0.1:49227 task reports connection:true
22223 Sep 22 23:21:38.156 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae WaitQuorum WaitQuorum WaitQuorum
22224 Sep 22 23:21:38.156 INFO [0]R flush_numbers: [0, 0]
22225 Sep 22 23:21:38.156 INFO [0]R generation: [0, 0]
22226 Sep 22 23:21:38.156 INFO [0]R dirty: [false, false]
22227 Sep 22 23:21:38.156 INFO [1]R flush_numbers: [0, 0]
22228 Sep 22 23:21:38.156 INFO [1]R generation: [0, 0]
22229 Sep 22 23:21:38.156 INFO [1]R dirty: [false, false]
22230 Sep 22 23:21:38.156 INFO [2]R flush_numbers: [0, 0]
22231 Sep 22 23:21:38.156 INFO [2]R generation: [0, 0]
22232 Sep 22 23:21:38.156 INFO [2]R dirty: [false, false]
22233 Sep 22 23:21:38.156 INFO Max found gen is 1
22234 Sep 22 23:21:38.156 INFO Generation requested: 1 >= found:1
22235 Sep 22 23:21:38.156 INFO Next flush: 1
22236 Sep 22 23:21:38.156 INFO All extents match
22237 Sep 22 23:21:38.156 INFO No downstairs repair required
22238 Sep 22 23:21:38.156 INFO No initial repair work was required
22239 Sep 22 23:21:38.156 INFO Set Downstairs and Upstairs active
22240 Sep 22 23:21:38.156 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae is now active with session: 2b11eab5-bc61-44bd-8216-a5695656c94c
22241 Sep 22 23:21:38.156 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae Set Active after no repair
22242 Sep 22 23:21:38.156 INFO current number of open files limit 65536 is already the maximum
22243 Sep 22 23:21:38.156 INFO Notify all downstairs, region set compare is done.
22244 Sep 22 23:21:38.156 INFO Set check for repair
22245 Sep 22 23:21:38.156 INFO [1] 127.0.0.1:44178 task reports connection:true
22246 Sep 22 23:21:38.156 INFO Created new region file "/tmp/downstairs-aIhbz7uq/region.json"
22247 Sep 22 23:21:38.156 INFO current number of open files limit 65536 is already the maximum
22248 Sep 22 23:21:38.156 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae Active Active Active
22249 Sep 22 23:21:38.156 INFO Opened existing region file "/tmp/downstairs-IDeF0yuc/region.json"
22250 Sep 22 23:21:38.156 INFO Set check for repair
22251 Sep 22 23:21:38.156 INFO Database read version 1
22252 Sep 22 23:21:38.156 INFO Database write version 1
22253 Sep 22 23:21:38.156 INFO [2] 127.0.0.1:53578 task reports connection:true
22254 Sep 22 23:21:38.156 INFO 5e3310b1-e4cc-48c6-92c2-40a58c307dae Active Active Active
22255 Sep 22 23:21:38.156 INFO Set check for repair
22256 Sep 22 23:21:38.156 INFO [0] received reconcile message
22257 Sep 22 23:21:38.157 INFO [0] All repairs completed, exit
22258 Sep 22 23:21:38.157 INFO [0] Starts cmd_loop
22259 Sep 22 23:21:38.157 INFO [1] received reconcile message
22260 Sep 22 23:21:38.157 INFO [1] All repairs completed, exit
22261 Sep 22 23:21:38.157 INFO UUID: 9471030d-cea7-4287-81a6-b07789df3e4d
22262 Sep 22 23:21:38.157 INFO Blocks per extent:5 Total Extents: 2
22263 Sep 22 23:21:38.157 INFO [1] Starts cmd_loop
22264 Sep 22 23:21:38.157 INFO Crucible Version: Crucible Version: 0.0.1
22265 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22266 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22267 rustc: 1.70.0 stable x86_64-unknown-illumos
22268 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22269 Sep 22 23:21:38.157 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22270 Sep 22 23:21:38.157 INFO [2] received reconcile message
22271 Sep 22 23:21:38.157 INFO Using address: 127.0.0.1:52312, task: main
22272 Sep 22 23:21:38.157 INFO [2] All repairs completed, exit
22273 Sep 22 23:21:38.157 INFO [2] Starts cmd_loop
22274 The guest has finished waiting for activation
22275 Sep 22 23:21:38.157 INFO Repair listens on 127.0.0.1:0, task: repair
22276 Sep 22 23:21:38.157 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39188, task: repair
22277 Sep 22 23:21:38.157 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39188, task: repair
22278 Sep 22 23:21:38.157 INFO listening, local_addr: 127.0.0.1:39188, task: repair
22279 The guest has requested activation
22280 Sep 22 23:21:38.157 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa active request set
22281 Sep 22 23:21:38.157 INFO [0] received activate with gen 1
22282 Sep 22 23:21:38.157 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39188, task: repair
22283 Sep 22 23:21:38.157 INFO [0] client got ds_active_rx, promote! session 902609d5-e2d5-4264-985b-fc9832ca6874
22284 Sep 22 23:21:38.157 INFO Using repair address: 127.0.0.1:39188, task: main
22285 Sep 22 23:21:38.157 INFO No SSL acceptor configured, task: main
22286 Sep 22 23:21:38.157 INFO [1] received activate with gen 1
22287 Sep 22 23:21:38.157 INFO [1] client got ds_active_rx, promote! session 902609d5-e2d5-4264-985b-fc9832ca6874
22288 Sep 22 23:21:38.157 INFO [2] received activate with gen 1
22289 Sep 22 23:21:38.158 INFO [2] client got ds_active_rx, promote! session 902609d5-e2d5-4264-985b-fc9832ca6874
22290 Sep 22 23:21:38.158 INFO UpstairsConnection { upstairs_id: e89b9f37-20f1-48ba-8434-b618ba5c23aa, session_id: 902609d5-e2d5-4264-985b-fc9832ca6874, gen: 1 } is now active (read-write)
22291 Sep 22 23:21:38.158 INFO UpstairsConnection { upstairs_id: e89b9f37-20f1-48ba-8434-b618ba5c23aa, session_id: 902609d5-e2d5-4264-985b-fc9832ca6874, gen: 1 } is now active (read-write)
22292 Sep 22 23:21:38.158 INFO UpstairsConnection { upstairs_id: e89b9f37-20f1-48ba-8434-b618ba5c23aa, session_id: 902609d5-e2d5-4264-985b-fc9832ca6874, gen: 1 } is now active (read-write)
22293 Sep 22 23:21:38.158 INFO current number of open files limit 65536 is already the maximum
22294 Sep 22 23:21:38.158 INFO Created new region file "/tmp/downstairs-z98mSOht/region.json"
22295 Sep 22 23:21:38.158 INFO [0] downstairs client at 127.0.0.1:58376 has UUID 481d7429-1224-42e4-a2f2-287bd0cfeaaf
22296 Sep 22 23:21:38.158 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 481d7429-1224-42e4-a2f2-287bd0cfeaaf, encrypted: true, database_read_version: 1, database_write_version: 1 }
22297 Sep 22 23:21:38.158 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa WaitActive WaitActive WaitActive
22298 Sep 22 23:21:38.158 INFO [1] downstairs client at 127.0.0.1:47267 has UUID f6750c55-7697-440f-81c9-8c2e90dcc4ce
22299 Sep 22 23:21:38.158 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f6750c55-7697-440f-81c9-8c2e90dcc4ce, encrypted: true, database_read_version: 1, database_write_version: 1 }
22300 Sep 22 23:21:38.159 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa WaitActive WaitActive WaitActive
22301 Sep 22 23:21:38.159 INFO [2] downstairs client at 127.0.0.1:54868 has UUID 4bd08688-0a92-4b76-9dcc-9afb5f6f7cc8
22302 Sep 22 23:21:38.159 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4bd08688-0a92-4b76-9dcc-9afb5f6f7cc8, encrypted: true, database_read_version: 1, database_write_version: 1 }
22303 Sep 22 23:21:38.159 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa WaitActive WaitActive WaitActive
22304 Sep 22 23:21:38.159 INFO Current flush_numbers [0..12]: [0, 0]
22305 Sep 22 23:21:38.159 INFO UUID: c9ee3b95-925e-4745-a37e-7d93b0c64736
22306 Sep 22 23:21:38.159 INFO Blocks per extent:5 Total Extents: 2
22307 Sep 22 23:21:38.159 INFO Crucible Version: Crucible Version: 0.0.1
22308 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22309 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22310 rustc: 1.70.0 stable x86_64-unknown-illumos
22311 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22312 Sep 22 23:21:38.159 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22313 Sep 22 23:21:38.159 INFO Using address: 127.0.0.1:46844, task: main
22314 Sep 22 23:21:38.159 INFO Downstairs has completed Negotiation, task: proc
22315 Sep 22 23:21:38.159 INFO Current flush_numbers [0..12]: [0, 0]
22316 Sep 22 23:21:38.160 INFO Repair listens on 127.0.0.1:0, task: repair
22317 Sep 22 23:21:38.160 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44127, task: repair
22318 Sep 22 23:21:38.160 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44127, task: repair
22319 Sep 22 23:21:38.160 INFO listening, local_addr: 127.0.0.1:44127, task: repair
22320 Sep 22 23:21:38.160 INFO Downstairs has completed Negotiation, task: proc
22321 Sep 22 23:21:38.160 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44127, task: repair
22322 Sep 22 23:21:38.160 INFO Using repair address: 127.0.0.1:44127, task: main
22323 Sep 22 23:21:38.160 INFO Current flush_numbers [0..12]: [0, 0]
22324 Sep 22 23:21:38.160 INFO No SSL acceptor configured, task: main
22325 Sep 22 23:21:38.160 INFO Downstairs has completed Negotiation, task: proc
22326 Sep 22 23:21:38.160 INFO [0] e89b9f37-20f1-48ba-8434-b618ba5c23aa (902609d5-e2d5-4264-985b-fc9832ca6874) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22327 Sep 22 23:21:38.160 INFO [0] Transition from WaitActive to WaitQuorum
22328 Sep 22 23:21:38.160 WARN [0] new RM replaced this: None
22329 Sep 22 23:21:38.160 INFO [0] Starts reconcile loop
22330 Sep 22 23:21:38.160 INFO [1] e89b9f37-20f1-48ba-8434-b618ba5c23aa (902609d5-e2d5-4264-985b-fc9832ca6874) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22331 Sep 22 23:21:38.160 INFO [1] Transition from WaitActive to WaitQuorum
22332 Sep 22 23:21:38.160 WARN [1] new RM replaced this: None
22333 Sep 22 23:21:38.160 INFO [1] Starts reconcile loop
22334 Sep 22 23:21:38.160 INFO current number of open files limit 65536 is already the maximum
22335 Sep 22 23:21:38.160 INFO current number of open files limit 65536 is already the maximum
22336 Sep 22 23:21:38.160 INFO [2] e89b9f37-20f1-48ba-8434-b618ba5c23aa (902609d5-e2d5-4264-985b-fc9832ca6874) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22337 Sep 22 23:21:38.160 INFO [2] Transition from WaitActive to WaitQuorum
22338 Sep 22 23:21:38.160 INFO Opened existing region file "/tmp/downstairs-aIhbz7uq/region.json"
22339 Sep 22 23:21:38.160 WARN [2] new RM replaced this: None
22340 Sep 22 23:21:38.160 INFO Database read version 1
22341 Sep 22 23:21:38.160 INFO Database write version 1
22342 Sep 22 23:21:38.160 INFO [2] Starts reconcile loop
22343 Sep 22 23:21:38.161 INFO Created new region file "/tmp/downstairs-zWh48QwP/region.json"
22344 Sep 22 23:21:38.161 INFO [0] 127.0.0.1:58376 task reports connection:true
22345 Sep 22 23:21:38.161 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa WaitQuorum WaitQuorum WaitQuorum
22346 Sep 22 23:21:38.161 INFO [0]R flush_numbers: [0, 0]
22347 Sep 22 23:21:38.161 INFO [0]R generation: [0, 0]
22348 Sep 22 23:21:38.161 INFO [0]R dirty: [false, false]
22349 Sep 22 23:21:38.161 INFO [1]R flush_numbers: [0, 0]
22350 Sep 22 23:21:38.161 INFO [1]R generation: [0, 0]
22351 Sep 22 23:21:38.161 INFO [1]R dirty: [false, false]
22352 Sep 22 23:21:38.161 INFO [2]R flush_numbers: [0, 0]
22353 Sep 22 23:21:38.161 INFO [2]R generation: [0, 0]
22354 Sep 22 23:21:38.161 INFO [2]R dirty: [false, false]
22355 Sep 22 23:21:38.161 INFO Max found gen is 1
22356 Sep 22 23:21:38.161 INFO Generation requested: 1 >= found:1
22357 Sep 22 23:21:38.161 INFO Next flush: 1
22358 Sep 22 23:21:38.161 INFO All extents match
22359 Sep 22 23:21:38.161 INFO No downstairs repair required
22360 Sep 22 23:21:38.161 INFO No initial repair work was required
22361 Sep 22 23:21:38.161 INFO Set Downstairs and Upstairs active
22362 Sep 22 23:21:38.161 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa is now active with session: 902609d5-e2d5-4264-985b-fc9832ca6874
22363 Sep 22 23:21:38.161 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa Set Active after no repair
22364 Sep 22 23:21:38.161 INFO Notify all downstairs, region set compare is done.
22365 Sep 22 23:21:38.161 INFO Set check for repair
22366 Sep 22 23:21:38.161 INFO [1] 127.0.0.1:47267 task reports connection:true
22367 Sep 22 23:21:38.161 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa Active Active Active
22368 Sep 22 23:21:38.161 INFO Set check for repair
22369 Sep 22 23:21:38.161 INFO [2] 127.0.0.1:54868 task reports connection:true
22370 Sep 22 23:21:38.161 INFO e89b9f37-20f1-48ba-8434-b618ba5c23aa Active Active Active
22371 Sep 22 23:21:38.161 INFO Set check for repair
22372 Sep 22 23:21:38.161 INFO [0] received reconcile message
22373 Sep 22 23:21:38.161 INFO [0] All repairs completed, exit
22374 Sep 22 23:21:38.161 INFO [0] Starts cmd_loop
22375 Sep 22 23:21:38.161 INFO [1] received reconcile message
22376 Sep 22 23:21:38.161 INFO [1] All repairs completed, exit
22377 Sep 22 23:21:38.161 INFO [1] Starts cmd_loop
22378 Sep 22 23:21:38.161 INFO [2] received reconcile message
22379 Sep 22 23:21:38.161 INFO [2] All repairs completed, exit
22380 Sep 22 23:21:38.161 INFO [2] Starts cmd_loop
22381 The guest has finished waiting for activation
22382 Sep 22 23:21:38.163 INFO UUID: 89e24c32-e4e6-442a-a2a4-07d6620fd2c7
22383 Sep 22 23:21:38.163 INFO Blocks per extent:5 Total Extents: 2
22384 Sep 22 23:21:38.163 INFO Crucible Version: Crucible Version: 0.0.1
22385 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22386 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22387 rustc: 1.70.0 stable x86_64-unknown-illumos
22388 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22389 Sep 22 23:21:38.163 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22390 Sep 22 23:21:38.163 INFO Using address: 127.0.0.1:37306, task: main
22391 Sep 22 23:21:38.163 INFO current number of open files limit 65536 is already the maximum
22392 Sep 22 23:21:38.163 INFO Opened existing region file "/tmp/downstairs-z98mSOht/region.json"
22393 Sep 22 23:21:38.163 INFO Database read version 1
22394 Sep 22 23:21:38.163 INFO Database write version 1
22395 Sep 22 23:21:38.163 INFO Repair listens on 127.0.0.1:0, task: repair
22396 Sep 22 23:21:38.163 DEBG IO Write 1000 has deps []
22397 Sep 22 23:21:38.163 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51785, task: repair
22398 Sep 22 23:21:38.163 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51785, task: repair
22399 Sep 22 23:21:38.163 INFO listening, local_addr: 127.0.0.1:51785, task: repair
22400 Sep 22 23:21:38.163 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51785, task: repair
22401 Sep 22 23:21:38.163 INFO Using repair address: 127.0.0.1:51785, task: main
22402 Sep 22 23:21:38.163 INFO No SSL acceptor configured, task: main
22403 note: configured to log to "/dev/stdout"
22404 Sep 22 23:21:38.165 INFO UUID: 3c399f96-6189-45bc-9c78-28fb3e21a0fa
22405 Sep 22 23:21:38.166 INFO Blocks per extent:5 Total Extents: 2
22406 Sep 22 23:21:38.166 INFO Crucible Version: Crucible Version: 0.0.1
22407 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22408 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22409 rustc: 1.70.0 stable x86_64-unknown-illumos
22410 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22411 Sep 22 23:21:38.166 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22412 Sep 22 23:21:38.166 INFO Using address: 127.0.0.1:58283, task: main
22413 Sep 22 23:21:38.166 INFO Repair listens on 127.0.0.1:0, task: repair
22414 Sep 22 23:21:38.166 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40584, task: repair
22415 Sep 22 23:21:38.166 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40584, task: repair
22416 Sep 22 23:21:38.166 INFO listening, local_addr: 127.0.0.1:40584, task: repair
22417 Sep 22 23:21:38.166 INFO current number of open files limit 65536 is already the maximum
22418 Sep 22 23:21:38.166 INFO Opened existing region file "/tmp/downstairs-zWh48QwP/region.json"
22419 Sep 22 23:21:38.166 INFO Database read version 1
22420 Sep 22 23:21:38.166 INFO Database write version 1
22421 Sep 22 23:21:38.166 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40584, task: repair
22422 Sep 22 23:21:38.166 INFO Using repair address: 127.0.0.1:40584, task: main
22423 Sep 22 23:21:38.166 INFO No SSL acceptor configured, task: main
224242023-09-22T23:21:38.166ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:51585
224252023-09-22T23:21:38.166ZINFOcrucible-pantry: listen IP: 127.0.0.1:51585
22426 Sep 22 23:21:38.167 INFO Upstairs starts
22427 Sep 22 23:21:38.167 INFO Crucible Version: BuildInfo {
22428 version: "0.0.1",
22429 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22430 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22431 git_branch: "main",
22432 rustc_semver: "1.70.0",
22433 rustc_channel: "stable",
22434 rustc_host_triple: "x86_64-unknown-illumos",
22435 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22436 cargo_triple: "x86_64-unknown-illumos",
22437 debug: true,
22438 opt_level: 0,
22439 }
22440 Sep 22 23:21:38.167 INFO Upstairs <-> Downstairs Message Version: 4
22441 Sep 22 23:21:38.167 INFO Crucible stats registered with UUID: 921a859f-fc02-4985-a3ec-da920cfa511a
22442 Sep 22 23:21:38.167 INFO Crucible 921a859f-fc02-4985-a3ec-da920cfa511a has session id: 25183a0b-04a2-464c-b2a4-ba6ae86fb122
22443 Sep 22 23:21:38.167 INFO listening on 127.0.0.1:0, task: main
22444 Sep 22 23:21:38.167 INFO listening on 127.0.0.1:0, task: main
22445 Sep 22 23:21:38.167 INFO listening on 127.0.0.1:0, task: main
22446 Sep 22 23:21:38.167 INFO listening on 127.0.0.1:0, task: main
22447 Sep 22 23:21:38.168 INFO listening on 127.0.0.1:0, task: main
22448 Sep 22 23:21:38.168 INFO listening on 127.0.0.1:0, task: main
22449 Sep 22 23:21:38.168 INFO [0] connecting to 127.0.0.1:51219, looper: 0
22450 Sep 22 23:21:38.168 INFO [1] connecting to 127.0.0.1:44324, looper: 1
22451 Sep 22 23:21:38.168 INFO [2] connecting to 127.0.0.1:52939, looper: 2
22452 Sep 22 23:21:38.168 INFO up_listen starts, task: up_listen
22453 Sep 22 23:21:38.168 INFO Wait for all three downstairs to come online
22454 Sep 22 23:21:38.168 INFO Flush timeout: 0.5
22455 Sep 22 23:21:38.168 INFO accepted connection from 127.0.0.1:47115, task: main
22456 Sep 22 23:21:38.168 INFO accepted connection from 127.0.0.1:55027, task: main
22457 Sep 22 23:21:38.168 INFO accepted connection from 127.0.0.1:44125, task: main
22458 Sep 22 23:21:38.168 INFO [0] 921a859f-fc02-4985-a3ec-da920cfa511a looper connected, looper: 0
22459 Sep 22 23:21:38.168 INFO [0] Proc runs for 127.0.0.1:51219 in state New
22460 Sep 22 23:21:38.168 INFO [1] 921a859f-fc02-4985-a3ec-da920cfa511a looper connected, looper: 1
22461 Sep 22 23:21:38.168 INFO [1] Proc runs for 127.0.0.1:44324 in state New
22462 Sep 22 23:21:38.168 INFO [2] 921a859f-fc02-4985-a3ec-da920cfa511a looper connected, looper: 2
22463 Sep 22 23:21:38.169 INFO [2] Proc runs for 127.0.0.1:52939 in state New
22464 Sep 22 23:21:38.169 DEBG up_ds_listen was notified
22465 Sep 22 23:21:38.169 INFO UUID: 2d5bbbc4-fa02-4480-b887-9b6f4980eb1b
22466 Sep 22 23:21:38.169 DEBG up_ds_listen process 1000
22467 Sep 22 23:21:38.169 INFO Blocks per extent:5 Total Extents: 2
22468 Sep 22 23:21:38.169 DEBG [A] ack job 1000:1, : downstairs
22469 Sep 22 23:21:38.169 INFO Upstairs starts
22470 Sep 22 23:21:38.169 DEBG up_ds_listen checked 1 jobs, back to waiting
22471 Sep 22 23:21:38.169 INFO Crucible Version: Crucible Version: 0.0.1
22472 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22473 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22474 rustc: 1.70.0 stable x86_64-unknown-illumos
22475 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22476 Sep 22 23:21:38.169 INFO Crucible Version: BuildInfo {
22477 version: "0.0.1",
22478 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22479 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22480 git_branch: "main",
22481 rustc_semver: "1.70.0",
22482 rustc_channel: "stable",
22483 rustc_host_triple: "x86_64-unknown-illumos",
22484 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22485 cargo_triple: "x86_64-unknown-illumos",
22486 debug: true,
22487 opt_level: 0,
22488 }
22489 Sep 22 23:21:38.169 INFO Upstairs <-> Downstairs Message Version: 4
22490 Sep 22 23:21:38.169 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22491 Sep 22 23:21:38.169 INFO Crucible stats registered with UUID: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa
22492 Sep 22 23:21:38.169 INFO Using address: 127.0.0.1:39258, task: main
22493 Sep 22 23:21:38.169 INFO Crucible 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa has session id: fc30cbc3-86fb-4d4d-b35d-cb789cc9bc9f
22494 Sep 22 23:21:38.169 INFO Connection request from 921a859f-fc02-4985-a3ec-da920cfa511a with version 4, task: proc
22495 Sep 22 23:21:38.169 INFO upstairs UpstairsConnection { upstairs_id: 921a859f-fc02-4985-a3ec-da920cfa511a, session_id: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e, gen: 1 } connected, version 4, task: proc
22496 Sep 22 23:21:38.169 INFO Connection request from 921a859f-fc02-4985-a3ec-da920cfa511a with version 4, task: proc
22497 Sep 22 23:21:38.169 INFO upstairs UpstairsConnection { upstairs_id: 921a859f-fc02-4985-a3ec-da920cfa511a, session_id: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e, gen: 1 } connected, version 4, task: proc
22498 Sep 22 23:21:38.169 INFO Connection request from 921a859f-fc02-4985-a3ec-da920cfa511a with version 4, task: proc
22499 Sep 22 23:21:38.169 INFO upstairs UpstairsConnection { upstairs_id: 921a859f-fc02-4985-a3ec-da920cfa511a, session_id: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e, gen: 1 } connected, version 4, task: proc
22500 Sep 22 23:21:38.169 INFO Repair listens on 127.0.0.1:0, task: repair
22501 Sep 22 23:21:38.169 INFO [0] connecting to 127.0.0.1:48752, looper: 0
22502 Sep 22 23:21:38.169 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61391, task: repair
22503 Sep 22 23:21:38.169 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61391, task: repair
22504 Sep 22 23:21:38.169 INFO [1] connecting to 127.0.0.1:52312, looper: 1
22505 Sep 22 23:21:38.169 INFO listening, local_addr: 127.0.0.1:61391, task: repair
22506 Sep 22 23:21:38.170 INFO [2] connecting to 127.0.0.1:58283, looper: 2
22507 Sep 22 23:21:38.170 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61391, task: repair
22508 Sep 22 23:21:38.170 INFO up_listen starts, task: up_listen
22509 Sep 22 23:21:38.170 INFO Wait for all three downstairs to come online
22510 Sep 22 23:21:38.170 INFO Flush timeout: 0.5
22511 Sep 22 23:21:38.170 INFO Using repair address: 127.0.0.1:61391, task: main
22512 Sep 22 23:21:38.170 INFO No SSL acceptor configured, task: main
22513 Sep 22 23:21:38.170 INFO current number of open files limit 65536 is already the maximum
22514 Sep 22 23:21:38.170 INFO accepted connection from 127.0.0.1:46328, task: main
22515 Sep 22 23:21:38.170 INFO Created new region file "/tmp/downstairs-lFV1uaQD/region.json"
22516 Sep 22 23:21:38.170 INFO accepted connection from 127.0.0.1:55292, task: main
22517 Sep 22 23:21:38.170 INFO accepted connection from 127.0.0.1:51427, task: main
22518 Sep 22 23:21:38.170 INFO [0] 921a859f-fc02-4985-a3ec-da920cfa511a (00e59270-0ce2-4dbd-bb99-47e86c7cfd3e) New New New ds_transition to WaitActive
22519 Sep 22 23:21:38.170 INFO [0] Transition from New to WaitActive
22520 Sep 22 23:21:38.170 INFO [1] 921a859f-fc02-4985-a3ec-da920cfa511a (00e59270-0ce2-4dbd-bb99-47e86c7cfd3e) WaitActive New New ds_transition to WaitActive
22521 Sep 22 23:21:38.170 INFO [1] Transition from New to WaitActive
22522 Sep 22 23:21:38.170 INFO [2] 921a859f-fc02-4985-a3ec-da920cfa511a (00e59270-0ce2-4dbd-bb99-47e86c7cfd3e) WaitActive WaitActive New ds_transition to WaitActive
22523 Sep 22 23:21:38.170 INFO [2] Transition from New to WaitActive
22524 Sep 22 23:21:38.170 INFO [0] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa looper connected, looper: 0
22525 Sep 22 23:21:38.171 INFO [0] Proc runs for 127.0.0.1:48752 in state New
22526 Sep 22 23:21:38.171 INFO [1] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa looper connected, looper: 1
22527 Sep 22 23:21:38.171 INFO [1] Proc runs for 127.0.0.1:52312 in state New
22528 Sep 22 23:21:38.171 INFO [2] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa looper connected, looper: 2
22529 Sep 22 23:21:38.171 INFO [2] Proc runs for 127.0.0.1:58283 in state New
22530 Sep 22 23:21:38.171 INFO Connection request from 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa with version 4, task: proc
22531 Sep 22 23:21:38.171 INFO upstairs UpstairsConnection { upstairs_id: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa, session_id: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329, gen: 1 } connected, version 4, task: proc
22532 Sep 22 23:21:38.171 INFO Connection request from 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa with version 4, task: proc
22533 Sep 22 23:21:38.171 INFO upstairs UpstairsConnection { upstairs_id: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa, session_id: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329, gen: 1 } connected, version 4, task: proc
22534 Sep 22 23:21:38.171 INFO Connection request from 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa with version 4, task: proc
22535 Sep 22 23:21:38.171 INFO upstairs UpstairsConnection { upstairs_id: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa, session_id: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329, gen: 1 } connected, version 4, task: proc
22536 Sep 22 23:21:38.172 INFO [0] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa (eb0da1b5-c89a-40f6-a24b-5a6a9adb2329) New New New ds_transition to WaitActive
22537 Sep 22 23:21:38.172 INFO [0] Transition from New to WaitActive
22538 Sep 22 23:21:38.172 INFO [1] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa (eb0da1b5-c89a-40f6-a24b-5a6a9adb2329) WaitActive New New ds_transition to WaitActive
22539 Sep 22 23:21:38.172 INFO [1] Transition from New to WaitActive
22540 Sep 22 23:21:38.172 INFO [2] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa (eb0da1b5-c89a-40f6-a24b-5a6a9adb2329) WaitActive WaitActive New ds_transition to WaitActive
22541 Sep 22 23:21:38.172 INFO [2] Transition from New to WaitActive
22542 Sep 22 23:21:38.172 DEBG IO Write 1000 has deps []
22543 The guest has requested activation
22544 Sep 22 23:21:38.172 INFO 921a859f-fc02-4985-a3ec-da920cfa511a active request set
22545 Sep 22 23:21:38.172 INFO [0] received activate with gen 1
22546 Sep 22 23:21:38.172 INFO [0] client got ds_active_rx, promote! session 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e
22547 Sep 22 23:21:38.172 INFO [1] received activate with gen 1
22548 Sep 22 23:21:38.172 INFO [1] client got ds_active_rx, promote! session 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e
22549 Sep 22 23:21:38.172 INFO [2] received activate with gen 1
22550 Sep 22 23:21:38.172 INFO [2] client got ds_active_rx, promote! session 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e
22551 Sep 22 23:21:38.172 INFO UpstairsConnection { upstairs_id: 921a859f-fc02-4985-a3ec-da920cfa511a, session_id: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e, gen: 1 } is now active (read-write)
22552 Sep 22 23:21:38.173 INFO UpstairsConnection { upstairs_id: 921a859f-fc02-4985-a3ec-da920cfa511a, session_id: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e, gen: 1 } is now active (read-write)
22553 Sep 22 23:21:38.173 INFO UpstairsConnection { upstairs_id: 921a859f-fc02-4985-a3ec-da920cfa511a, session_id: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e, gen: 1 } is now active (read-write)
22554 Sep 22 23:21:38.173 INFO [0] downstairs client at 127.0.0.1:51219 has UUID 71f89d71-fca2-4667-8b12-c125e7772d19
22555 Sep 22 23:21:38.173 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 71f89d71-fca2-4667-8b12-c125e7772d19, encrypted: true, database_read_version: 1, database_write_version: 1 }
22556 Sep 22 23:21:38.173 INFO 921a859f-fc02-4985-a3ec-da920cfa511a WaitActive WaitActive WaitActive
22557 Sep 22 23:21:38.173 INFO [1] downstairs client at 127.0.0.1:44324 has UUID 7c9decb7-dee6-4a23-a2b7-7860a60db1d6
22558 Sep 22 23:21:38.173 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7c9decb7-dee6-4a23-a2b7-7860a60db1d6, encrypted: true, database_read_version: 1, database_write_version: 1 }
22559 Sep 22 23:21:38.173 INFO 921a859f-fc02-4985-a3ec-da920cfa511a WaitActive WaitActive WaitActive
22560 Sep 22 23:21:38.174 INFO [2] downstairs client at 127.0.0.1:52939 has UUID 6e4f126c-20d7-47c5-9d94-936338c4e38d
22561 Sep 22 23:21:38.174 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6e4f126c-20d7-47c5-9d94-936338c4e38d, encrypted: true, database_read_version: 1, database_write_version: 1 }
22562 Sep 22 23:21:38.174 INFO 921a859f-fc02-4985-a3ec-da920cfa511a WaitActive WaitActive WaitActive
22563 Sep 22 23:21:38.174 INFO current number of open files limit 65536 is already the maximum
22564 Sep 22 23:21:38.174 INFO Opened existing region file "/tmp/downstairs-lFV1uaQD/region.json"
22565 Sep 22 23:21:38.174 INFO Database read version 1
22566 Sep 22 23:21:38.174 INFO Database write version 1
22567 Sep 22 23:21:38.174 INFO Current flush_numbers [0..12]: [0, 0]
22568 Sep 22 23:21:38.174 INFO Downstairs has completed Negotiation, task: proc
22569 Sep 22 23:21:38.174 INFO Current flush_numbers [0..12]: [0, 0]
22570 Sep 22 23:21:38.174 INFO Downstairs has completed Negotiation, task: proc
22571 Sep 22 23:21:38.175 INFO Current flush_numbers [0..12]: [0, 0]
22572 Sep 22 23:21:38.175 INFO Downstairs has completed Negotiation, task: proc
22573 Sep 22 23:21:38.175 INFO [0] 921a859f-fc02-4985-a3ec-da920cfa511a (00e59270-0ce2-4dbd-bb99-47e86c7cfd3e) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22574 Sep 22 23:21:38.175 INFO [0] Transition from WaitActive to WaitQuorum
22575 Sep 22 23:21:38.175 WARN [0] new RM replaced this: None
22576 Sep 22 23:21:38.175 INFO [0] Starts reconcile loop
22577 Sep 22 23:21:38.175 INFO [1] 921a859f-fc02-4985-a3ec-da920cfa511a (00e59270-0ce2-4dbd-bb99-47e86c7cfd3e) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22578 Sep 22 23:21:38.175 INFO [1] Transition from WaitActive to WaitQuorum
22579 Sep 22 23:21:38.175 WARN [1] new RM replaced this: None
22580 Sep 22 23:21:38.175 INFO [1] Starts reconcile loop
22581 Sep 22 23:21:38.175 INFO [2] 921a859f-fc02-4985-a3ec-da920cfa511a (00e59270-0ce2-4dbd-bb99-47e86c7cfd3e) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22582 Sep 22 23:21:38.175 INFO [2] Transition from WaitActive to WaitQuorum
22583 Sep 22 23:21:38.175 WARN [2] new RM replaced this: None
22584 Sep 22 23:21:38.175 INFO [2] Starts reconcile loop
22585 Sep 22 23:21:38.176 INFO [0] 127.0.0.1:51219 task reports connection:true
22586 Sep 22 23:21:38.176 INFO 921a859f-fc02-4985-a3ec-da920cfa511a WaitQuorum WaitQuorum WaitQuorum
22587 Sep 22 23:21:38.176 INFO [0]R flush_numbers: [0, 0]
22588 Sep 22 23:21:38.176 INFO [0]R generation: [0, 0]
22589 Sep 22 23:21:38.176 INFO [0]R dirty: [false, false]
22590 Sep 22 23:21:38.176 INFO [1]R flush_numbers: [0, 0]
22591 Sep 22 23:21:38.176 INFO [1]R generation: [0, 0]
22592 Sep 22 23:21:38.176 INFO [1]R dirty: [false, false]
22593 Sep 22 23:21:38.176 INFO [2]R flush_numbers: [0, 0]
22594 Sep 22 23:21:38.176 INFO [2]R generation: [0, 0]
22595 Sep 22 23:21:38.176 INFO [2]R dirty: [false, false]
22596 Sep 22 23:21:38.176 INFO Max found gen is 1
22597 Sep 22 23:21:38.176 INFO Generation requested: 1 >= found:1
22598 Sep 22 23:21:38.176 INFO Next flush: 1
22599 Sep 22 23:21:38.176 INFO All extents match
22600 Sep 22 23:21:38.176 INFO No downstairs repair required
22601 Sep 22 23:21:38.176 INFO No initial repair work was required
22602 Sep 22 23:21:38.176 INFO Set Downstairs and Upstairs active
22603 Sep 22 23:21:38.176 INFO 921a859f-fc02-4985-a3ec-da920cfa511a is now active with session: 00e59270-0ce2-4dbd-bb99-47e86c7cfd3e
22604 Sep 22 23:21:38.176 INFO 921a859f-fc02-4985-a3ec-da920cfa511a Set Active after no repair
22605 Sep 22 23:21:38.176 INFO Notify all downstairs, region set compare is done.
22606 Sep 22 23:21:38.176 INFO Set check for repair
22607 Sep 22 23:21:38.176 INFO [1] 127.0.0.1:44324 task reports connection:true
22608 Sep 22 23:21:38.176 INFO 921a859f-fc02-4985-a3ec-da920cfa511a Active Active Active
22609 Sep 22 23:21:38.176 INFO Set check for repair
22610 Sep 22 23:21:38.176 INFO [2] 127.0.0.1:52939 task reports connection:true
22611 Sep 22 23:21:38.176 INFO 921a859f-fc02-4985-a3ec-da920cfa511a Active Active Active
22612 Sep 22 23:21:38.176 INFO Set check for repair
22613 Sep 22 23:21:38.176 INFO [0] received reconcile message
22614 Sep 22 23:21:38.176 INFO [0] All repairs completed, exit
22615 Sep 22 23:21:38.176 INFO [0] Starts cmd_loop
22616 Sep 22 23:21:38.176 INFO UUID: 635f8034-f948-4642-a596-c4452884ff7b
22617 Sep 22 23:21:38.176 INFO Blocks per extent:5 Total Extents: 2
22618 Sep 22 23:21:38.176 INFO [1] received reconcile message
22619 Sep 22 23:21:38.176 INFO [1] All repairs completed, exit
22620 Sep 22 23:21:38.176 INFO Crucible Version: Crucible Version: 0.0.1
22621 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22622 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22623 rustc: 1.70.0 stable x86_64-unknown-illumos
22624 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22625 Sep 22 23:21:38.177 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22626 Sep 22 23:21:38.177 INFO [1] Starts cmd_loop
22627 Sep 22 23:21:38.177 INFO Using address: 127.0.0.1:63407, task: main
22628 Sep 22 23:21:38.177 INFO [2] received reconcile message
22629 Sep 22 23:21:38.177 INFO [2] All repairs completed, exit
22630 Sep 22 23:21:38.177 INFO [2] Starts cmd_loop
22631 The guest has finished waiting for activation
22632 Sep 22 23:21:38.177 INFO Repair listens on 127.0.0.1:0, task: repair
22633 Sep 22 23:21:38.177 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62799, task: repair
22634 Sep 22 23:21:38.177 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62799, task: repair
22635 The guest has requested activation
22636 Sep 22 23:21:38.177 INFO listening, local_addr: 127.0.0.1:62799, task: repair
22637 Sep 22 23:21:38.177 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa active request set
22638 Sep 22 23:21:38.177 INFO [0] received activate with gen 1
22639 Sep 22 23:21:38.177 INFO [0] client got ds_active_rx, promote! session eb0da1b5-c89a-40f6-a24b-5a6a9adb2329
22640 Sep 22 23:21:38.177 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62799, task: repair
22641 Sep 22 23:21:38.177 INFO Using repair address: 127.0.0.1:62799, task: main
22642 Sep 22 23:21:38.177 INFO [1] received activate with gen 1
22643 Sep 22 23:21:38.177 INFO No SSL acceptor configured, task: main
22644 Sep 22 23:21:38.177 INFO [1] client got ds_active_rx, promote! session eb0da1b5-c89a-40f6-a24b-5a6a9adb2329
22645 Sep 22 23:21:38.178 INFO [2] received activate with gen 1
22646 Sep 22 23:21:38.178 INFO [2] client got ds_active_rx, promote! session eb0da1b5-c89a-40f6-a24b-5a6a9adb2329
22647 Sep 22 23:21:38.178 INFO UpstairsConnection { upstairs_id: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa, session_id: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329, gen: 1 } is now active (read-write)
22648 Sep 22 23:21:38.178 INFO UpstairsConnection { upstairs_id: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa, session_id: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329, gen: 1 } is now active (read-write)
22649 Sep 22 23:21:38.178 DEBG up_ds_listen was notified
22650 Sep 22 23:21:38.178 INFO current number of open files limit 65536 is already the maximum
22651 Sep 22 23:21:38.178 DEBG up_ds_listen process 1000
22652 Sep 22 23:21:38.178 INFO UpstairsConnection { upstairs_id: 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa, session_id: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329, gen: 1 } is now active (read-write)
22653 Sep 22 23:21:38.178 DEBG [A] ack job 1000:1, : downstairs
22654 Sep 22 23:21:38.178 INFO Created new region file "/tmp/downstairs-BulvGBTg/region.json"
22655 Sep 22 23:21:38.178 DEBG up_ds_listen checked 1 jobs, back to waiting
22656 Sep 22 23:21:38.178 DEBG IO Read 1001 has deps [JobId(1000)]
22657 Sep 22 23:21:38.178 INFO [0] downstairs client at 127.0.0.1:48752 has UUID 0d1968cb-b30f-4a44-a680-9f3b52cfb45a
22658 Sep 22 23:21:38.178 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 0d1968cb-b30f-4a44-a680-9f3b52cfb45a, encrypted: true, database_read_version: 1, database_write_version: 1 }
22659 Sep 22 23:21:38.179 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa WaitActive WaitActive WaitActive
22660 Sep 22 23:21:38.179 INFO [1] downstairs client at 127.0.0.1:52312 has UUID 9471030d-cea7-4287-81a6-b07789df3e4d
22661 Sep 22 23:21:38.179 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9471030d-cea7-4287-81a6-b07789df3e4d, encrypted: true, database_read_version: 1, database_write_version: 1 }
22662 Sep 22 23:21:38.179 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa WaitActive WaitActive WaitActive
22663 Sep 22 23:21:38.179 INFO [2] downstairs client at 127.0.0.1:58283 has UUID 3c399f96-6189-45bc-9c78-28fb3e21a0fa
22664 Sep 22 23:21:38.179 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3c399f96-6189-45bc-9c78-28fb3e21a0fa, encrypted: true, database_read_version: 1, database_write_version: 1 }
22665 Sep 22 23:21:38.179 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa WaitActive WaitActive WaitActive
22666 Sep 22 23:21:38.179 INFO Current flush_numbers [0..12]: [0, 0]
22667 Sep 22 23:21:38.179 DEBG Read :1001 deps:[JobId(1000)] res:true
22668 Sep 22 23:21:38.179 INFO Downstairs has completed Negotiation, task: proc
22669 Sep 22 23:21:38.180 DEBG Read :1001 deps:[JobId(1000)] res:true
22670 Sep 22 23:21:38.180 INFO Current flush_numbers [0..12]: [0, 0]
22671 Sep 22 23:21:38.180 INFO Downstairs has completed Negotiation, task: proc
22672 Sep 22 23:21:38.180 DEBG Read :1001 deps:[JobId(1000)] res:true
22673 Sep 22 23:21:38.180 INFO Current flush_numbers [0..12]: [0, 0]
22674 Sep 22 23:21:38.180 INFO Downstairs has completed Negotiation, task: proc
22675 Sep 22 23:21:38.181 INFO [0] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa (eb0da1b5-c89a-40f6-a24b-5a6a9adb2329) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22676 Sep 22 23:21:38.181 INFO [0] Transition from WaitActive to WaitQuorum
22677 Sep 22 23:21:38.181 WARN [0] new RM replaced this: None
22678 Sep 22 23:21:38.181 INFO [0] Starts reconcile loop
22679 Sep 22 23:21:38.181 INFO [1] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa (eb0da1b5-c89a-40f6-a24b-5a6a9adb2329) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22680 Sep 22 23:21:38.181 INFO [1] Transition from WaitActive to WaitQuorum
22681 Sep 22 23:21:38.181 WARN [1] new RM replaced this: None
22682 Sep 22 23:21:38.181 INFO [1] Starts reconcile loop
22683 Sep 22 23:21:38.181 INFO [2] 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa (eb0da1b5-c89a-40f6-a24b-5a6a9adb2329) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22684 Sep 22 23:21:38.181 INFO [2] Transition from WaitActive to WaitQuorum
22685 Sep 22 23:21:38.181 WARN [2] new RM replaced this: None
22686 Sep 22 23:21:38.181 INFO [2] Starts reconcile loop
22687 Sep 22 23:21:38.181 INFO [0] 127.0.0.1:48752 task reports connection:true
22688 Sep 22 23:21:38.181 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa WaitQuorum WaitQuorum WaitQuorum
22689 Sep 22 23:21:38.181 INFO [0]R flush_numbers: [0, 0]
22690 Sep 22 23:21:38.181 INFO [0]R generation: [0, 0]
22691 Sep 22 23:21:38.181 INFO [0]R dirty: [false, false]
22692 Sep 22 23:21:38.181 INFO [1]R flush_numbers: [0, 0]
22693 Sep 22 23:21:38.181 INFO [1]R generation: [0, 0]
22694 Sep 22 23:21:38.181 INFO [1]R dirty: [false, false]
22695 Sep 22 23:21:38.181 INFO [2]R flush_numbers: [0, 0]
22696 Sep 22 23:21:38.181 INFO [2]R generation: [0, 0]
22697 Sep 22 23:21:38.181 INFO [2]R dirty: [false, false]
22698 Sep 22 23:21:38.181 INFO Max found gen is 1
22699 Sep 22 23:21:38.181 INFO Generation requested: 1 >= found:1
22700 Sep 22 23:21:38.181 INFO Next flush: 1
22701 Sep 22 23:21:38.181 INFO All extents match
22702 Sep 22 23:21:38.181 INFO No downstairs repair required
22703 Sep 22 23:21:38.181 INFO No initial repair work was required
22704 Sep 22 23:21:38.181 INFO Set Downstairs and Upstairs active
22705 Sep 22 23:21:38.181 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa is now active with session: eb0da1b5-c89a-40f6-a24b-5a6a9adb2329
22706 Sep 22 23:21:38.181 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa Set Active after no repair
22707 Sep 22 23:21:38.181 INFO Notify all downstairs, region set compare is done.
22708 Sep 22 23:21:38.181 INFO Set check for repair
22709 Sep 22 23:21:38.181 INFO [1] 127.0.0.1:52312 task reports connection:true
22710 Sep 22 23:21:38.182 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa Active Active Active
22711 Sep 22 23:21:38.182 INFO Set check for repair
22712 Sep 22 23:21:38.182 INFO [2] 127.0.0.1:58283 task reports connection:true
22713 Sep 22 23:21:38.182 INFO 5bb4c0ba-2389-4fe1-a4cc-d740e8a65daa Active Active Active
22714 Sep 22 23:21:38.182 INFO Set check for repair
22715 Sep 22 23:21:38.182 INFO [0] received reconcile message
22716 Sep 22 23:21:38.182 INFO [0] All repairs completed, exit
22717 Sep 22 23:21:38.182 INFO [0] Starts cmd_loop
22718 Sep 22 23:21:38.182 INFO [1] received reconcile message
22719 Sep 22 23:21:38.182 INFO [1] All repairs completed, exit
22720 Sep 22 23:21:38.182 INFO [1] Starts cmd_loop
22721 Sep 22 23:21:38.182 INFO [2] received reconcile message
22722 Sep 22 23:21:38.182 INFO [2] All repairs completed, exit
22723 Sep 22 23:21:38.182 INFO [2] Starts cmd_loop
22724 The guest has finished waiting for activation
22725 Sep 22 23:21:38.183 INFO current number of open files limit 65536 is already the maximum
22726 Sep 22 23:21:38.183 INFO Opened existing region file "/tmp/downstairs-BulvGBTg/region.json"
22727 Sep 22 23:21:38.183 INFO Database read version 1
22728 Sep 22 23:21:38.183 INFO Database write version 1
22729 Sep 22 23:21:38.183 DEBG [0] Read AckReady 1001, : downstairs
22730 Sep 22 23:21:38.183 DEBG IO Write 1000 has deps []
22731 Sep 22 23:21:38.184 DEBG [1] Read already AckReady 1001, : downstairs
22732 Sep 22 23:21:38.185 INFO UUID: d6053127-e474-4389-afe6-5018c4a351d2
22733 Sep 22 23:21:38.185 INFO Blocks per extent:5 Total Extents: 2
22734 Sep 22 23:21:38.185 DEBG up_ds_listen was notified
22735 Sep 22 23:21:38.185 INFO Crucible Version: Crucible Version: 0.0.1
22736 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22737 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22738 rustc: 1.70.0 stable x86_64-unknown-illumos
22739 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22740 Sep 22 23:21:38.185 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22741 Sep 22 23:21:38.185 DEBG up_ds_listen process 1000
22742 Sep 22 23:21:38.185 INFO Using address: 127.0.0.1:42114, task: main
22743 Sep 22 23:21:38.185 DEBG [A] ack job 1000:1, : downstairs
22744 Sep 22 23:21:38.185 DEBG up_ds_listen checked 1 jobs, back to waiting
22745 Sep 22 23:21:38.185 INFO Repair listens on 127.0.0.1:0, task: repair
22746 Sep 22 23:21:38.186 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49825, task: repair
22747 Sep 22 23:21:38.186 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49825, task: repair
22748 Sep 22 23:21:38.186 DEBG IO Write 1000 has deps []
22749 Sep 22 23:21:38.186 INFO listening, local_addr: 127.0.0.1:49825, task: repair
22750 Sep 22 23:21:38.186 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49825, task: repair
22751 Sep 22 23:21:38.186 INFO Using repair address: 127.0.0.1:49825, task: main
22752 Sep 22 23:21:38.186 INFO No SSL acceptor configured, task: main
22753 Sep 22 23:21:38.186 DEBG [2] Read already AckReady 1001, : downstairs
22754 Sep 22 23:21:38.186 INFO current number of open files limit 65536 is already the maximum
22755 Sep 22 23:21:38.186 INFO Opened existing region file "/tmp/downstairs-wZJpXBn2/region.json"
22756 Sep 22 23:21:38.186 INFO Database read version 1
22757 Sep 22 23:21:38.186 INFO Database write version 1
22758 Sep 22 23:21:38.186 DEBG up_ds_listen was notified
22759 Sep 22 23:21:38.186 DEBG up_ds_listen process 1001
22760 Sep 22 23:21:38.186 DEBG [A] ack job 1001:2, : downstairs
22761 Sep 22 23:21:38.186 INFO Upstairs starts
22762 Sep 22 23:21:38.186 INFO Crucible Version: BuildInfo {
22763 version: "0.0.1",
22764 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22765 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22766 git_branch: "main",
22767 rustc_semver: "1.70.0",
22768 rustc_channel: "stable",
22769 rustc_host_triple: "x86_64-unknown-illumos",
22770 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22771 cargo_triple: "x86_64-unknown-illumos",
22772 debug: true,
22773 opt_level: 0,
22774 }
22775 Sep 22 23:21:38.186 INFO Upstairs <-> Downstairs Message Version: 4
22776 Sep 22 23:21:38.186 INFO Crucible stats registered with UUID: 913a7ee4-ee11-47c7-a343-a7c9547130eb
22777 Sep 22 23:21:38.186 INFO Crucible 913a7ee4-ee11-47c7-a343-a7c9547130eb has session id: b580f6b1-5b80-4eb2-9b2e-ec0df1111077
22778 Sep 22 23:21:38.187 INFO listening on 127.0.0.1:0, task: main
22779 Sep 22 23:21:38.187 INFO listening on 127.0.0.1:0, task: main
22780 Sep 22 23:21:38.187 INFO listening on 127.0.0.1:0, task: main
22781 Sep 22 23:21:38.187 INFO listening on 127.0.0.1:0, task: main
22782 Sep 22 23:21:38.187 DEBG up_ds_listen checked 1 jobs, back to waiting
22783 Sep 22 23:21:38.187 INFO listening on 127.0.0.1:0, task: main
22784 Sep 22 23:21:38.187 INFO listening on 127.0.0.1:0, task: main
22785 Sep 22 23:21:38.187 INFO [0] connecting to 127.0.0.1:62388, looper: 0
22786 Sep 22 23:21:38.187 INFO [1] connecting to 127.0.0.1:43611, looper: 1
22787 Sep 22 23:21:38.187 DEBG IO Read 1001 has deps [JobId(1000)]
22788 Sep 22 23:21:38.187 INFO [2] connecting to 127.0.0.1:46844, looper: 2
22789 Sep 22 23:21:38.187 INFO up_listen starts, task: up_listen
22790 Sep 22 23:21:38.187 INFO Wait for all three downstairs to come online
22791 Sep 22 23:21:38.187 INFO Flush timeout: 0.5
22792 Sep 22 23:21:38.188 INFO accepted connection from 127.0.0.1:39854, task: main
22793 Sep 22 23:21:38.188 INFO accepted connection from 127.0.0.1:54255, task: main
22794 Sep 22 23:21:38.188 INFO accepted connection from 127.0.0.1:57006, task: main
22795 Sep 22 23:21:38.188 INFO [0] 913a7ee4-ee11-47c7-a343-a7c9547130eb looper connected, looper: 0
22796 Sep 22 23:21:38.188 INFO [0] Proc runs for 127.0.0.1:62388 in state New
22797 Sep 22 23:21:38.188 INFO [1] 913a7ee4-ee11-47c7-a343-a7c9547130eb looper connected, looper: 1
22798 Sep 22 23:21:38.188 INFO [1] Proc runs for 127.0.0.1:43611 in state New
22799 Sep 22 23:21:38.188 DEBG Read :1001 deps:[JobId(1000)] res:true
22800 Sep 22 23:21:38.188 INFO [2] 913a7ee4-ee11-47c7-a343-a7c9547130eb looper connected, looper: 2
22801 Sep 22 23:21:38.188 DEBG up_ds_listen was notified
22802 Sep 22 23:21:38.188 INFO [2] Proc runs for 127.0.0.1:46844 in state New
22803 Sep 22 23:21:38.188 DEBG up_ds_listen process 1000
22804 Sep 22 23:21:38.188 DEBG [A] ack job 1000:1, : downstairs
22805 Sep 22 23:21:38.188 DEBG up_ds_listen checked 1 jobs, back to waiting
22806 Sep 22 23:21:38.188 DEBG Read :1001 deps:[JobId(1000)] res:true
22807 Sep 22 23:21:38.188 INFO Upstairs starts
22808 Sep 22 23:21:38.188 INFO Crucible Version: BuildInfo {
22809 version: "0.0.1",
22810 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22811 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22812 git_branch: "main",
22813 rustc_semver: "1.70.0",
22814 rustc_channel: "stable",
22815 rustc_host_triple: "x86_64-unknown-illumos",
22816 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22817 cargo_triple: "x86_64-unknown-illumos",
22818 debug: true,
22819 opt_level: 0,
22820 }
22821 Sep 22 23:21:38.189 INFO Upstairs <-> Downstairs Message Version: 4
22822 Sep 22 23:21:38.189 INFO Crucible stats registered with UUID: edae0731-d549-4a19-a15b-e1c9f01172b4
22823 Sep 22 23:21:38.189 INFO Crucible edae0731-d549-4a19-a15b-e1c9f01172b4 has session id: cab8b72b-236e-4a56-8922-0a546f70b336
22824 Sep 22 23:21:38.189 INFO Connection request from 913a7ee4-ee11-47c7-a343-a7c9547130eb with version 4, task: proc
22825 Sep 22 23:21:38.189 INFO upstairs UpstairsConnection { upstairs_id: 913a7ee4-ee11-47c7-a343-a7c9547130eb, session_id: fbf121c3-eef6-4f40-8f3c-e3ef373940ac, gen: 1 } connected, version 4, task: proc
22826 Sep 22 23:21:38.189 INFO Connection request from 913a7ee4-ee11-47c7-a343-a7c9547130eb with version 4, task: proc
22827 Sep 22 23:21:38.189 INFO upstairs UpstairsConnection { upstairs_id: 913a7ee4-ee11-47c7-a343-a7c9547130eb, session_id: fbf121c3-eef6-4f40-8f3c-e3ef373940ac, gen: 1 } connected, version 4, task: proc
22828 Sep 22 23:21:38.189 INFO Connection request from 913a7ee4-ee11-47c7-a343-a7c9547130eb with version 4, task: proc
22829 Sep 22 23:21:38.189 INFO upstairs UpstairsConnection { upstairs_id: 913a7ee4-ee11-47c7-a343-a7c9547130eb, session_id: fbf121c3-eef6-4f40-8f3c-e3ef373940ac, gen: 1 } connected, version 4, task: proc
22830 Sep 22 23:21:38.189 DEBG Read :1001 deps:[JobId(1000)] res:true
22831 Sep 22 23:21:38.189 INFO [0] connecting to 127.0.0.1:39258, looper: 0
22832 Sep 22 23:21:38.189 INFO [1] connecting to 127.0.0.1:63407, looper: 1
22833 Sep 22 23:21:38.189 INFO [2] connecting to 127.0.0.1:42114, looper: 2
22834 Sep 22 23:21:38.189 INFO up_listen starts, task: up_listen
22835 Sep 22 23:21:38.189 INFO Wait for all three downstairs to come online
22836 Sep 22 23:21:38.189 INFO Flush timeout: 0.5
22837 Sep 22 23:21:38.190 INFO accepted connection from 127.0.0.1:51279, task: main
22838 Sep 22 23:21:38.190 INFO accepted connection from 127.0.0.1:63124, task: main
22839 Sep 22 23:21:38.190 INFO accepted connection from 127.0.0.1:33283, task: main
22840 Sep 22 23:21:38.190 INFO [0] 913a7ee4-ee11-47c7-a343-a7c9547130eb (fbf121c3-eef6-4f40-8f3c-e3ef373940ac) New New New ds_transition to WaitActive
22841 Sep 22 23:21:38.190 INFO [0] Transition from New to WaitActive
22842 Sep 22 23:21:38.190 INFO [1] 913a7ee4-ee11-47c7-a343-a7c9547130eb (fbf121c3-eef6-4f40-8f3c-e3ef373940ac) WaitActive New New ds_transition to WaitActive
22843 Sep 22 23:21:38.190 INFO [1] Transition from New to WaitActive
22844 Sep 22 23:21:38.190 INFO [2] 913a7ee4-ee11-47c7-a343-a7c9547130eb (fbf121c3-eef6-4f40-8f3c-e3ef373940ac) WaitActive WaitActive New ds_transition to WaitActive
22845 Sep 22 23:21:38.190 INFO [2] Transition from New to WaitActive
22846 Sep 22 23:21:38.190 INFO [0] edae0731-d549-4a19-a15b-e1c9f01172b4 looper connected, looper: 0
22847 Sep 22 23:21:38.190 INFO [0] Proc runs for 127.0.0.1:39258 in state New
22848 Sep 22 23:21:38.190 INFO [1] edae0731-d549-4a19-a15b-e1c9f01172b4 looper connected, looper: 1
22849 Sep 22 23:21:38.190 INFO [1] Proc runs for 127.0.0.1:63407 in state New
22850 Sep 22 23:21:38.190 INFO [2] edae0731-d549-4a19-a15b-e1c9f01172b4 looper connected, looper: 2
22851 Sep 22 23:21:38.190 INFO [2] Proc runs for 127.0.0.1:42114 in state New
22852 Sep 22 23:21:38.191 INFO Connection request from edae0731-d549-4a19-a15b-e1c9f01172b4 with version 4, task: proc
22853 Sep 22 23:21:38.191 INFO upstairs UpstairsConnection { upstairs_id: edae0731-d549-4a19-a15b-e1c9f01172b4, session_id: d263b9b4-f13a-4d85-ac47-3dad10fd9290, gen: 1 } connected, version 4, task: proc
22854 Sep 22 23:21:38.191 INFO Connection request from edae0731-d549-4a19-a15b-e1c9f01172b4 with version 4, task: proc
22855 Sep 22 23:21:38.191 INFO upstairs UpstairsConnection { upstairs_id: edae0731-d549-4a19-a15b-e1c9f01172b4, session_id: d263b9b4-f13a-4d85-ac47-3dad10fd9290, gen: 1 } connected, version 4, task: proc
22856 Sep 22 23:21:38.191 INFO Connection request from edae0731-d549-4a19-a15b-e1c9f01172b4 with version 4, task: proc
22857 Sep 22 23:21:38.191 INFO upstairs UpstairsConnection { upstairs_id: edae0731-d549-4a19-a15b-e1c9f01172b4, session_id: d263b9b4-f13a-4d85-ac47-3dad10fd9290, gen: 1 } connected, version 4, task: proc
22858 Sep 22 23:21:38.191 INFO [0] edae0731-d549-4a19-a15b-e1c9f01172b4 (d263b9b4-f13a-4d85-ac47-3dad10fd9290) New New New ds_transition to WaitActive
22859 Sep 22 23:21:38.191 INFO [0] Transition from New to WaitActive
22860 Sep 22 23:21:38.191 INFO [1] edae0731-d549-4a19-a15b-e1c9f01172b4 (d263b9b4-f13a-4d85-ac47-3dad10fd9290) WaitActive New New ds_transition to WaitActive
22861 Sep 22 23:21:38.191 INFO [1] Transition from New to WaitActive
22862 Sep 22 23:21:38.191 INFO [2] edae0731-d549-4a19-a15b-e1c9f01172b4 (d263b9b4-f13a-4d85-ac47-3dad10fd9290) WaitActive WaitActive New ds_transition to WaitActive
22863 Sep 22 23:21:38.191 INFO [2] Transition from New to WaitActive
22864 The guest has requested activation
22865 Sep 22 23:21:38.191 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb active request set
22866 Sep 22 23:21:38.191 INFO [0] received activate with gen 1
22867 Sep 22 23:21:38.191 INFO [0] client got ds_active_rx, promote! session fbf121c3-eef6-4f40-8f3c-e3ef373940ac
22868 Sep 22 23:21:38.191 DEBG IO Write 1001 has deps [JobId(1000)]
22869 Sep 22 23:21:38.191 INFO [1] received activate with gen 1
22870 Sep 22 23:21:38.191 INFO [1] client got ds_active_rx, promote! session fbf121c3-eef6-4f40-8f3c-e3ef373940ac
22871 Sep 22 23:21:38.191 INFO [2] received activate with gen 1
22872 Sep 22 23:21:38.191 INFO [2] client got ds_active_rx, promote! session fbf121c3-eef6-4f40-8f3c-e3ef373940ac
22873 Sep 22 23:21:38.192 INFO UpstairsConnection { upstairs_id: 913a7ee4-ee11-47c7-a343-a7c9547130eb, session_id: fbf121c3-eef6-4f40-8f3c-e3ef373940ac, gen: 1 } is now active (read-write)
22874 Sep 22 23:21:38.192 INFO UpstairsConnection { upstairs_id: 913a7ee4-ee11-47c7-a343-a7c9547130eb, session_id: fbf121c3-eef6-4f40-8f3c-e3ef373940ac, gen: 1 } is now active (read-write)
22875 Sep 22 23:21:38.192 INFO UpstairsConnection { upstairs_id: 913a7ee4-ee11-47c7-a343-a7c9547130eb, session_id: fbf121c3-eef6-4f40-8f3c-e3ef373940ac, gen: 1 } is now active (read-write)
22876 Sep 22 23:21:38.192 INFO [0] downstairs client at 127.0.0.1:62388 has UUID 6a3090e7-a0f2-4a80-947f-1db172a31a5f
22877 Sep 22 23:21:38.192 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6a3090e7-a0f2-4a80-947f-1db172a31a5f, encrypted: true, database_read_version: 1, database_write_version: 1 }
22878 Sep 22 23:21:38.192 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb WaitActive WaitActive WaitActive
22879 Sep 22 23:21:38.192 INFO [1] downstairs client at 127.0.0.1:43611 has UUID ab814286-bbb3-4f61-a468-0a3953430633
22880 Sep 22 23:21:38.192 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ab814286-bbb3-4f61-a468-0a3953430633, encrypted: true, database_read_version: 1, database_write_version: 1 }
22881 Sep 22 23:21:38.192 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb WaitActive WaitActive WaitActive
22882 Sep 22 23:21:38.192 INFO [2] downstairs client at 127.0.0.1:46844 has UUID c9ee3b95-925e-4745-a37e-7d93b0c64736
22883 Sep 22 23:21:38.192 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c9ee3b95-925e-4745-a37e-7d93b0c64736, encrypted: true, database_read_version: 1, database_write_version: 1 }
22884 Sep 22 23:21:38.192 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb WaitActive WaitActive WaitActive
22885 Sep 22 23:21:38.193 INFO Current flush_numbers [0..12]: [0, 0]
22886 Sep 22 23:21:38.193 INFO Downstairs has completed Negotiation, task: proc
22887 Sep 22 23:21:38.193 INFO Current flush_numbers [0..12]: [0, 0]
22888 Sep 22 23:21:38.193 INFO Downstairs has completed Negotiation, task: proc
22889 Sep 22 23:21:38.193 INFO Current flush_numbers [0..12]: [0, 0]
22890 Sep 22 23:21:38.193 INFO Downstairs has completed Negotiation, task: proc
22891 Sep 22 23:21:38.193 DEBG [0] Read AckReady 1001, : downstairs
22892 Sep 22 23:21:38.194 INFO [0] 913a7ee4-ee11-47c7-a343-a7c9547130eb (fbf121c3-eef6-4f40-8f3c-e3ef373940ac) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22893 Sep 22 23:21:38.194 INFO [0] Transition from WaitActive to WaitQuorum
22894 Sep 22 23:21:38.194 WARN [0] new RM replaced this: None
22895 Sep 22 23:21:38.194 INFO [0] Starts reconcile loop
22896 Sep 22 23:21:38.194 INFO [1] 913a7ee4-ee11-47c7-a343-a7c9547130eb (fbf121c3-eef6-4f40-8f3c-e3ef373940ac) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22897 Sep 22 23:21:38.194 INFO [1] Transition from WaitActive to WaitQuorum
22898 Sep 22 23:21:38.194 WARN [1] new RM replaced this: None
22899 Sep 22 23:21:38.194 INFO [1] Starts reconcile loop
22900 Sep 22 23:21:38.194 INFO [2] 913a7ee4-ee11-47c7-a343-a7c9547130eb (fbf121c3-eef6-4f40-8f3c-e3ef373940ac) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22901 Sep 22 23:21:38.194 INFO [2] Transition from WaitActive to WaitQuorum
22902 Sep 22 23:21:38.194 WARN [2] new RM replaced this: None
22903 Sep 22 23:21:38.194 INFO [2] Starts reconcile loop
22904 Sep 22 23:21:38.194 INFO [0] 127.0.0.1:62388 task reports connection:true
22905 Sep 22 23:21:38.194 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb WaitQuorum WaitQuorum WaitQuorum
22906 Sep 22 23:21:38.194 INFO [0]R flush_numbers: [0, 0]
22907 Sep 22 23:21:38.194 INFO [0]R generation: [0, 0]
22908 Sep 22 23:21:38.194 INFO [0]R dirty: [false, false]
22909 Sep 22 23:21:38.194 INFO [1]R flush_numbers: [0, 0]
22910 Sep 22 23:21:38.194 INFO [1]R generation: [0, 0]
22911 Sep 22 23:21:38.194 INFO [1]R dirty: [false, false]
22912 Sep 22 23:21:38.194 INFO [2]R flush_numbers: [0, 0]
22913 Sep 22 23:21:38.194 INFO [2]R generation: [0, 0]
22914 Sep 22 23:21:38.194 INFO [2]R dirty: [false, false]
22915 Sep 22 23:21:38.194 INFO Max found gen is 1
22916 Sep 22 23:21:38.194 INFO Generation requested: 1 >= found:1
22917 Sep 22 23:21:38.194 INFO Next flush: 1
22918 Sep 22 23:21:38.194 INFO All extents match
22919 Sep 22 23:21:38.194 INFO No downstairs repair required
22920 Sep 22 23:21:38.194 INFO UUID: d1402af7-f301-4500-84d1-bcd26b1c483b
22921 Sep 22 23:21:38.194 INFO Blocks per extent:512 Total Extents: 188
22922 Sep 22 23:21:38.194 INFO No initial repair work was required
22923 Sep 22 23:21:38.194 INFO Set Downstairs and Upstairs active
22924 Sep 22 23:21:38.194 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb is now active with session: fbf121c3-eef6-4f40-8f3c-e3ef373940ac
22925 Sep 22 23:21:38.194 INFO Crucible Version: Crucible Version: 0.0.1
22926 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22927 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22928 rustc: 1.70.0 stable x86_64-unknown-illumos
22929 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22930 Sep 22 23:21:38.194 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22931 Sep 22 23:21:38.194 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb Set Active after no repair
22932 Sep 22 23:21:38.194 INFO Using address: 127.0.0.1:36172, task: main
22933 Sep 22 23:21:38.194 INFO Notify all downstairs, region set compare is done.
22934 Sep 22 23:21:38.194 INFO Set check for repair
22935 Sep 22 23:21:38.195 INFO [1] 127.0.0.1:43611 task reports connection:true
22936 Sep 22 23:21:38.195 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb Active Active Active
22937 Sep 22 23:21:38.195 INFO Set check for repair
22938 Sep 22 23:21:38.195 INFO [2] 127.0.0.1:46844 task reports connection:true
22939 Sep 22 23:21:38.195 INFO 913a7ee4-ee11-47c7-a343-a7c9547130eb Active Active Active
22940 Sep 22 23:21:38.195 INFO Set check for repair
22941 Sep 22 23:21:38.195 INFO Repair listens on 127.0.0.1:0, task: repair
22942 Sep 22 23:21:38.195 INFO [0] received reconcile message
22943 Sep 22 23:21:38.195 INFO [0] All repairs completed, exit
22944 Sep 22 23:21:38.195 INFO [0] Starts cmd_loop
22945 Sep 22 23:21:38.195 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44765, task: repair
22946 Sep 22 23:21:38.195 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44765, task: repair
22947 Sep 22 23:21:38.195 INFO [1] received reconcile message
22948 Sep 22 23:21:38.195 INFO listening, local_addr: 127.0.0.1:44765, task: repair
22949 Sep 22 23:21:38.195 INFO [1] All repairs completed, exit
22950 Sep 22 23:21:38.195 INFO [1] Starts cmd_loop
22951 Sep 22 23:21:38.195 INFO [2] received reconcile message
22952 Sep 22 23:21:38.195 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44765, task: repair
22953 Sep 22 23:21:38.195 INFO Using repair address: 127.0.0.1:44765, task: main
22954 Sep 22 23:21:38.195 INFO [2] All repairs completed, exit
22955 Sep 22 23:21:38.195 INFO No SSL acceptor configured, task: main
22956 Sep 22 23:21:38.195 INFO [2] Starts cmd_loop
22957 The guest has finished waiting for activation
22958 The guest has requested activation
22959 Sep 22 23:21:38.196 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 active request set
22960 Sep 22 23:21:38.196 INFO [0] received activate with gen 1
22961 Sep 22 23:21:38.196 INFO [0] client got ds_active_rx, promote! session d263b9b4-f13a-4d85-ac47-3dad10fd9290
22962 Sep 22 23:21:38.196 INFO [1] received activate with gen 1
22963 Sep 22 23:21:38.196 INFO [1] client got ds_active_rx, promote! session d263b9b4-f13a-4d85-ac47-3dad10fd9290
22964 Sep 22 23:21:38.196 INFO [2] received activate with gen 1
22965 Sep 22 23:21:38.196 INFO [2] client got ds_active_rx, promote! session d263b9b4-f13a-4d85-ac47-3dad10fd9290
22966 Sep 22 23:21:38.196 INFO UpstairsConnection { upstairs_id: edae0731-d549-4a19-a15b-e1c9f01172b4, session_id: d263b9b4-f13a-4d85-ac47-3dad10fd9290, gen: 1 } is now active (read-write)
22967 Sep 22 23:21:38.196 INFO UpstairsConnection { upstairs_id: edae0731-d549-4a19-a15b-e1c9f01172b4, session_id: d263b9b4-f13a-4d85-ac47-3dad10fd9290, gen: 1 } is now active (read-write)
22968 Sep 22 23:21:38.196 DEBG [1] Read already AckReady 1001, : downstairs
22969 Sep 22 23:21:38.196 INFO UpstairsConnection { upstairs_id: edae0731-d549-4a19-a15b-e1c9f01172b4, session_id: d263b9b4-f13a-4d85-ac47-3dad10fd9290, gen: 1 } is now active (read-write)
22970 Sep 22 23:21:38.196 DEBG up_ds_listen was notified
22971 Sep 22 23:21:38.196 DEBG up_ds_listen process 1001
22972 Sep 22 23:21:38.196 DEBG [A] ack job 1001:2, : downstairs
22973 Sep 22 23:21:38.196 DEBG IO Flush 1001 has deps [JobId(1000)]
22974 Sep 22 23:21:38.196 DEBG up_ds_listen checked 1 jobs, back to waiting
22975 Sep 22 23:21:38.197 INFO [0] downstairs client at 127.0.0.1:39258 has UUID 2d5bbbc4-fa02-4480-b887-9b6f4980eb1b
22976 Sep 22 23:21:38.197 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2d5bbbc4-fa02-4480-b887-9b6f4980eb1b, encrypted: true, database_read_version: 1, database_write_version: 1 }
22977 Sep 22 23:21:38.197 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 WaitActive WaitActive WaitActive
22978 Sep 22 23:21:38.197 INFO [1] downstairs client at 127.0.0.1:63407 has UUID 635f8034-f948-4642-a596-c4452884ff7b
22979 Sep 22 23:21:38.197 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 635f8034-f948-4642-a596-c4452884ff7b, encrypted: true, database_read_version: 1, database_write_version: 1 }
22980 Sep 22 23:21:38.197 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 WaitActive WaitActive WaitActive
22981 Sep 22 23:21:38.197 INFO [2] downstairs client at 127.0.0.1:42114 has UUID d6053127-e474-4389-afe6-5018c4a351d2
22982 Sep 22 23:21:38.197 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d6053127-e474-4389-afe6-5018c4a351d2, encrypted: true, database_read_version: 1, database_write_version: 1 }
22983 Sep 22 23:21:38.197 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 WaitActive WaitActive WaitActive
22984 Sep 22 23:21:38.197 INFO Current flush_numbers [0..12]: [0, 0]
22985 Sep 22 23:21:38.198 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
22986 Sep 22 23:21:38.198 INFO Downstairs has completed Negotiation, task: proc
22987 Sep 22 23:21:38.198 DEBG up_ds_listen was notified
22988 Sep 22 23:21:38.198 DEBG up_ds_listen process 1002
22989 Sep 22 23:21:38.198 DEBG [A] ack job 1002:3, : downstairs
22990 Sep 22 23:21:38.198 DEBG up_ds_listen checked 1 jobs, back to waiting
22991 Sep 22 23:21:38.198 INFO Current flush_numbers [0..12]: [0, 0]
22992 Sep 22 23:21:38.198 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
22993 Sep 22 23:21:38.198 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
22994 Sep 22 23:21:38.198 INFO Downstairs has completed Negotiation, task: proc
22995 Sep 22 23:21:38.198 INFO Current flush_numbers [0..12]: [0, 0]
22996 Sep 22 23:21:38.198 DEBG up_ds_listen was notified
22997 Sep 22 23:21:38.198 DEBG up_ds_listen process 1001
22998 Sep 22 23:21:38.198 DEBG [A] ack job 1001:2, : downstairs
22999 Sep 22 23:21:38.198 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
23000 Sep 22 23:21:38.198 DEBG up_ds_listen checked 1 jobs, back to waiting
23001 Sep 22 23:21:38.198 INFO Downstairs has completed Negotiation, task: proc
23002 Sep 22 23:21:38.199 INFO [0] edae0731-d549-4a19-a15b-e1c9f01172b4 (d263b9b4-f13a-4d85-ac47-3dad10fd9290) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
23003 Sep 22 23:21:38.199 INFO [0] Transition from WaitActive to WaitQuorum
23004 Sep 22 23:21:38.199 WARN [0] new RM replaced this: None
23005 Sep 22 23:21:38.199 INFO [0] Starts reconcile loop
23006 Sep 22 23:21:38.199 INFO [1] edae0731-d549-4a19-a15b-e1c9f01172b4 (d263b9b4-f13a-4d85-ac47-3dad10fd9290) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
23007 Sep 22 23:21:38.199 INFO [1] Transition from WaitActive to WaitQuorum
23008 Sep 22 23:21:38.199 WARN [1] new RM replaced this: None
23009 Sep 22 23:21:38.199 INFO [1] Starts reconcile loop
23010 Sep 22 23:21:38.199 INFO [2] edae0731-d549-4a19-a15b-e1c9f01172b4 (d263b9b4-f13a-4d85-ac47-3dad10fd9290) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
23011 Sep 22 23:21:38.199 INFO [2] Transition from WaitActive to WaitQuorum
23012 Sep 22 23:21:38.199 WARN [2] new RM replaced this: None
23013 Sep 22 23:21:38.199 INFO [2] Starts reconcile loop
23014 Sep 22 23:21:38.199 INFO [0] 127.0.0.1:39258 task reports connection:true
23015 Sep 22 23:21:38.199 DEBG [2] Read already AckReady 1001, : downstairs
23016 Sep 22 23:21:38.199 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 WaitQuorum WaitQuorum WaitQuorum
23017 Sep 22 23:21:38.199 INFO [0]R flush_numbers: [0, 0]
23018 Sep 22 23:21:38.199 INFO [0]R generation: [0, 0]
23019 Sep 22 23:21:38.199 DEBG up_ds_listen was notified
23020 Sep 22 23:21:38.199 INFO [0]R dirty: [false, false]
23021 Sep 22 23:21:38.199 INFO [1]R flush_numbers: [0, 0]
23022 Sep 22 23:21:38.199 DEBG up_ds_listen process 1001
23023 Sep 22 23:21:38.199 INFO [1]R generation: [0, 0]
23024 Sep 22 23:21:38.199 DEBG [A] ack job 1001:2, : downstairs
23025 Sep 22 23:21:38.199 INFO [1]R dirty: [false, false]
23026 Sep 22 23:21:38.199 DEBG Write :1002 deps:[JobId(1001), JobId(1000)] res:true
23027 Sep 22 23:21:38.199 INFO [2]R flush_numbers: [0, 0]
23028 Sep 22 23:21:38.199 INFO [2]R generation: [0, 0]
23029 Sep 22 23:21:38.199 INFO [2]R dirty: [false, false]
23030 Sep 22 23:21:38.199 INFO Max found gen is 1
23031 Sep 22 23:21:38.199 INFO Generation requested: 1 >= found:1
23032 Sep 22 23:21:38.199 INFO Next flush: 1
23033 Sep 22 23:21:38.199 INFO All extents match
23034 Sep 22 23:21:38.199 INFO No downstairs repair required
23035 Sep 22 23:21:38.199 INFO No initial repair work was required
23036 Sep 22 23:21:38.199 INFO Set Downstairs and Upstairs active
23037 Sep 22 23:21:38.199 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 is now active with session: d263b9b4-f13a-4d85-ac47-3dad10fd9290
23038 Sep 22 23:21:38.199 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 Set Active after no repair
23039 Sep 22 23:21:38.199 INFO Notify all downstairs, region set compare is done.
23040 Sep 22 23:21:38.200 INFO Set check for repair
23041 Sep 22 23:21:38.200 DEBG Write :1002 deps:[JobId(1001), JobId(1000)] res:true
23042 Sep 22 23:21:38.200 INFO [1] 127.0.0.1:63407 task reports connection:true
23043 Sep 22 23:21:38.200 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 Active Active Active
23044 Sep 22 23:21:38.200 INFO Set check for repair
23045 Sep 22 23:21:38.200 INFO [2] 127.0.0.1:42114 task reports connection:true
23046 Sep 22 23:21:38.200 INFO edae0731-d549-4a19-a15b-e1c9f01172b4 Active Active Active
23047 Sep 22 23:21:38.200 INFO Set check for repair
23048 Sep 22 23:21:38.200 DEBG up_ds_listen checked 1 jobs, back to waiting
23049 Sep 22 23:21:38.200 INFO [0] received reconcile message
23050 Sep 22 23:21:38.200 DEBG Write :1002 deps:[JobId(1001), JobId(1000)] res:true
23051 Sep 22 23:21:38.200 INFO [0] All repairs completed, exit
23052 Sep 22 23:21:38.200 INFO [0] Starts cmd_loop
23053 Sep 22 23:21:38.200 INFO [1] received reconcile message
23054 Sep 22 23:21:38.200 INFO [1] All repairs completed, exit
23055 Sep 22 23:21:38.200 INFO [1] Starts cmd_loop
23056 Sep 22 23:21:38.200 INFO [2] received reconcile message
23057 Sep 22 23:21:38.200 INFO [2] All repairs completed, exit
23058 Sep 22 23:21:38.200 INFO [2] Starts cmd_loop
23059 The guest has finished waiting for activation
23060 Sep 22 23:21:38.201 DEBG IO Write 1000 has deps []
23061 Sep 22 23:21:38.203 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
23062 Sep 22 23:21:38.203 DEBG IO Write 1001 has deps [JobId(1000)]
23063 Sep 22 23:21:38.203 DEBG up_ds_listen was notified
23064 Sep 22 23:21:38.203 DEBG up_ds_listen process 1000
23065 Sep 22 23:21:38.203 DEBG [A] ack job 1000:1, : downstairs
23066 Sep 22 23:21:38.203 DEBG up_ds_listen checked 1 jobs, back to waiting
23067 Sep 22 23:21:38.203 DEBG up_ds_listen was notified
23068 Sep 22 23:21:38.203 DEBG up_ds_listen process 1001
23069 Sep 22 23:21:38.203 DEBG [A] ack job 1001:2, : downstairs
23070 Sep 22 23:21:38.203 DEBG up_ds_listen checked 1 jobs, back to waiting
23071 Sep 22 23:21:38.204 DEBG IO Write 1000 has deps []
23072 Sep 22 23:21:38.206 DEBG up_ds_listen was notified
23073 Sep 22 23:21:38.206 DEBG up_ds_listen process 1000
23074 Sep 22 23:21:38.206 DEBG [A] ack job 1000:1, : downstairs
23075 Sep 22 23:21:38.206 DEBG up_ds_listen checked 1 jobs, back to waiting
23076 Sep 22 23:21:38.206 DEBG IO Read 1001 has deps [JobId(1000)]
23077 Sep 22 23:21:38.206 DEBG Write :1001 deps:[JobId(1000)] res:true
23078 Sep 22 23:21:38.206 DEBG up_ds_listen was notified
23079 Sep 22 23:21:38.207 DEBG up_ds_listen process 1002
23080 Sep 22 23:21:38.207 DEBG [A] ack job 1002:3, : downstairs
23081 Sep 22 23:21:38.207 DEBG up_ds_listen checked 1 jobs, back to waiting
23082 Sep 22 23:21:38.207 DEBG Read :1001 deps:[JobId(1000)] res:true
23083 Sep 22 23:21:38.207 DEBG Write :1001 deps:[JobId(1000)] res:true
23084 Sep 22 23:21:38.207 INFO current number of open files limit 65536 is already the maximum
23085 Sep 22 23:21:38.207 DEBG Read :1001 deps:[JobId(1000)] res:true
23086 Sep 22 23:21:38.207 INFO Created new region file "/tmp/downstairs-XotxiwRu/region.json"
23087 Sep 22 23:21:38.207 DEBG Read :1001 deps:[JobId(1000)] res:true
23088 Sep 22 23:21:38.208 DEBG Write :1001 deps:[JobId(1000)] res:true
23089 Sep 22 23:21:38.208 DEBG IO Read 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
23090 Sep 22 23:21:38.208 DEBG [0] Read AckReady 1001, : downstairs
23091 Sep 22 23:21:38.208 DEBG [1] Read already AckReady 1001, : downstairs
23092 Sep 22 23:21:38.209 DEBG [2] Read already AckReady 1001, : downstairs
23093 Sep 22 23:21:38.209 DEBG up_ds_listen was notified
23094 Sep 22 23:21:38.209 DEBG up_ds_listen process 1001
23095 Sep 22 23:21:38.209 DEBG [A] ack job 1001:2, : downstairs
23096 Sep 22 23:21:38.209 DEBG Read :1003 deps:[JobId(1002), JobId(1001), JobId(1000)] res:true
23097 Sep 22 23:21:38.209 DEBG up_ds_listen checked 1 jobs, back to waiting
23098 Sep 22 23:21:38.209 DEBG IO Read 1001 has deps [JobId(1000)]
23099 Sep 22 23:21:38.210 DEBG Read :1003 deps:[JobId(1002), JobId(1001), JobId(1000)] res:true
23100 Sep 22 23:21:38.210 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
23101 Sep 22 23:21:38.210 DEBG Read :1003 deps:[JobId(1002), JobId(1001), JobId(1000)] res:true
23102 Sep 22 23:21:38.210 DEBG Read :1001 deps:[JobId(1000)] res:true
23103 Sep 22 23:21:38.210 DEBG Read :1001 deps:[JobId(1000)] res:true
23104 Sep 22 23:21:38.210 DEBG Read :1001 deps:[JobId(1000)] res:true
23105 Sep 22 23:21:38.211 DEBG [0] Read AckReady 1001, : downstairs
23106 Sep 22 23:21:38.212 DEBG [1] Read already AckReady 1001, : downstairs
23107 Sep 22 23:21:38.212 DEBG [2] Read already AckReady 1001, : downstairs
23108 Sep 22 23:21:38.212 DEBG up_ds_listen was notified
23109 Sep 22 23:21:38.212 DEBG up_ds_listen process 1001
23110 Sep 22 23:21:38.212 DEBG [A] ack job 1001:2, : downstairs
23111 Sep 22 23:21:38.212 DEBG up_ds_listen checked 1 jobs, back to waiting
23112 Sep 22 23:21:38.213 DEBG up_ds_listen was notified
23113 Sep 22 23:21:38.213 DEBG up_ds_listen process 1002
23114 Sep 22 23:21:38.213 DEBG [A] ack job 1002:3, : downstairs
23115 Sep 22 23:21:38.213 DEBG up_ds_listen checked 1 jobs, back to waiting
23116 Sep 22 23:21:38.213 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
23117 Sep 22 23:21:38.214 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23118 Sep 22 23:21:38.215 DEBG [0] Read AckReady 1003, : downstairs
23119 Sep 22 23:21:38.215 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23120 Sep 22 23:21:38.215 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
23121 Sep 22 23:21:38.215 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23122 Sep 22 23:21:38.218 DEBG [1] Read already AckReady 1003, : downstairs
23123 Sep 22 23:21:38.220 DEBG [0] Read AckReady 1003, : downstairs
23124 Sep 22 23:21:38.220 DEBG up_ds_listen was notified
23125 Sep 22 23:21:38.220 DEBG up_ds_listen process 1002
23126 Sep 22 23:21:38.220 DEBG [A] ack job 1002:3, : downstairs
23127 Sep 22 23:21:38.220 DEBG up_ds_listen checked 1 jobs, back to waiting
23128 Sep 22 23:21:38.221 DEBG [2] Read already AckReady 1003, : downstairs
23129 Sep 22 23:21:38.221 DEBG up_ds_listen was notified
23130 Sep 22 23:21:38.221 DEBG up_ds_listen process 1003
23131 Sep 22 23:21:38.221 DEBG [A] ack job 1003:4, : downstairs
23132 Sep 22 23:21:38.221 DEBG up_ds_listen checked 1 jobs, back to waiting
23133 Sep 22 23:21:38.222 DEBG IO Read 1002 has deps [JobId(1001), JobId(1000)]
23134 Sep 22 23:21:38.222 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
23135 Sep 22 23:21:38.223 DEBG [1] Read already AckReady 1003, : downstairs
23136 Sep 22 23:21:38.223 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
23137 Sep 22 23:21:38.223 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
23138 Sep 22 23:21:38.223 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
23139 Sep 22 23:21:38.226 DEBG [2] Read already AckReady 1003, : downstairs
23140 Sep 22 23:21:38.226 DEBG up_ds_listen was notified
23141 Sep 22 23:21:38.226 DEBG up_ds_listen process 1003
23142 Sep 22 23:21:38.226 DEBG [A] ack job 1003:4, : downstairs
23143 Sep 22 23:21:38.226 DEBG up_ds_listen checked 1 jobs, back to waiting
23144 Sep 22 23:21:38.227 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
23145 Sep 22 23:21:38.228 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23146 Sep 22 23:21:38.228 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23147 Sep 22 23:21:38.228 DEBG [0] Read AckReady 1002, : downstairs
23148 Sep 22 23:21:38.228 DEBG up_ds_listen was notified
23149 Sep 22 23:21:38.228 DEBG up_ds_listen process 1002
23150 Sep 22 23:21:38.228 DEBG [A] ack job 1002:3, : downstairs
23151 Sep 22 23:21:38.228 DEBG up_ds_listen checked 1 jobs, back to waiting
23152 Sep 22 23:21:38.229 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23153 Sep 22 23:21:38.229 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
23154 Sep 22 23:21:38.230 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23155 Sep 22 23:21:38.230 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23156 Sep 22 23:21:38.231 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23157 Sep 22 23:21:38.231 DEBG [1] Read already AckReady 1002, : downstairs
23158 Sep 22 23:21:38.233 DEBG [0] Read AckReady 1003, : downstairs
23159 Sep 22 23:21:38.234 DEBG [2] Read already AckReady 1002, : downstairs
23160 Sep 22 23:21:38.234 DEBG up_ds_listen was notified
23161 Sep 22 23:21:38.234 DEBG up_ds_listen process 1002
23162 Sep 22 23:21:38.234 DEBG [A] ack job 1002:3, : downstairs
23163 Sep 22 23:21:38.235 DEBG up_ds_listen checked 1 jobs, back to waiting
23164 Sep 22 23:21:38.235 DEBG [0] Read AckReady 1003, : downstairs
23165 Sep 22 23:21:38.236 DEBG [1] Read already AckReady 1003, : downstairs
23166 Sep 22 23:21:38.238 DEBG [1] Read already AckReady 1003, : downstairs
23167 Sep 22 23:21:38.239 DEBG [2] Read already AckReady 1003, : downstairs
23168 Sep 22 23:21:38.239 DEBG up_ds_listen was notified
23169 Sep 22 23:21:38.239 DEBG up_ds_listen process 1003
23170 Sep 22 23:21:38.239 DEBG [A] ack job 1003:4, : downstairs
23171 Sep 22 23:21:38.239 DEBG up_ds_listen checked 1 jobs, back to waiting
23172 Sep 22 23:21:38.240 DEBG [2] Read already AckReady 1003, : downstairs
23173 Sep 22 23:21:38.240 DEBG up_ds_listen was notified
23174 Sep 22 23:21:38.240 DEBG up_ds_listen process 1003
23175 Sep 22 23:21:38.240 DEBG [A] ack job 1003:4, : downstairs
23176 Sep 22 23:21:38.240 INFO listening on 127.0.0.1:0, task: main
23177 Sep 22 23:21:38.240 INFO listening on 127.0.0.1:0, task: main
23178 Sep 22 23:21:38.240 INFO listening on 127.0.0.1:0, task: main
23179 Sep 22 23:21:38.241 DEBG up_ds_listen checked 1 jobs, back to waiting
23180 Sep 22 23:21:38.241 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
231812023-09-22T23:21:38.241ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51585 remote_addr = 127.0.0.1:63359
23182 Sep 22 23:21:38.242 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23183 Sep 22 23:21:38.243 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23184 Sep 22 23:21:38.243 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23185 test test::integration_test_volume_write_unwritten_subvols_3 ... ok
231862023-09-22T23:21:38.243ZINFOcrucible-pantry (datafile): no entry exists for volume 16548ffc-77b2-48b7-a37b-cfc57a4b3680, constructing...
23187 Sep 22 23:21:38.243 INFO current number of open files limit 65536 is already the maximum
231882023-09-22T23:21:38.243ZINFOcrucible-pantry (datafile): Upstairs starts
231892023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
231902023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
23191 Sep 22 23:21:38.244 INFO Created new region file "/tmp/downstairs-sxN8iMqh/region.json"
231922023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: d17b320f-b32d-4e32-93ab-fda757545dc7
231932023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): Crucible d17b320f-b32d-4e32-93ab-fda757545dc7 has session id: bead0ee0-d667-4905-9676-91a0e4e103e0
231942023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:39369 looper = 0
231952023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:55268 looper = 1
231962023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:37306 looper = 2
231972023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
231982023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
231992023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
232002023-09-22T23:21:38.244ZINFOcrucible-pantry (datafile): volume 16548ffc-77b2-48b7-a37b-cfc57a4b3680 constructed ok
23201 The guest has requested activation
232022023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 active request set
232032023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): [0] d17b320f-b32d-4e32-93ab-fda757545dc7 looper connected looper = 0
232042023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:39369 in state New
232052023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): [1] d17b320f-b32d-4e32-93ab-fda757545dc7 looper connected looper = 1
232062023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:55268 in state New
232072023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): [2] d17b320f-b32d-4e32-93ab-fda757545dc7 looper connected looper = 2
232082023-09-22T23:21:38.245ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:37306 in state New
23209 Sep 22 23:21:38.245 INFO accepted connection from 127.0.0.1:39935, task: main
23210 Sep 22 23:21:38.245 INFO accepted connection from 127.0.0.1:35109, task: main
23211 Sep 22 23:21:38.245 INFO accepted connection from 127.0.0.1:34869, task: main
23212 Sep 22 23:21:38.246 INFO Connection request from d17b320f-b32d-4e32-93ab-fda757545dc7 with version 4, task: proc
23213 Sep 22 23:21:38.246 INFO upstairs UpstairsConnection { upstairs_id: d17b320f-b32d-4e32-93ab-fda757545dc7, session_id: c6dc54f3-5d65-4987-826a-9776dd72d00d, gen: 1 } connected, version 4, task: proc
23214 Sep 22 23:21:38.246 INFO Connection request from d17b320f-b32d-4e32-93ab-fda757545dc7 with version 4, task: proc
23215 Sep 22 23:21:38.246 INFO upstairs UpstairsConnection { upstairs_id: d17b320f-b32d-4e32-93ab-fda757545dc7, session_id: c6dc54f3-5d65-4987-826a-9776dd72d00d, gen: 1 } connected, version 4, task: proc
23216 Sep 22 23:21:38.246 INFO Connection request from d17b320f-b32d-4e32-93ab-fda757545dc7 with version 4, task: proc
23217 Sep 22 23:21:38.246 INFO upstairs UpstairsConnection { upstairs_id: d17b320f-b32d-4e32-93ab-fda757545dc7, session_id: c6dc54f3-5d65-4987-826a-9776dd72d00d, gen: 1 } connected, version 4, task: proc
232182023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [0] d17b320f-b32d-4e32-93ab-fda757545dc7 (c6dc54f3-5d65-4987-826a-9776dd72d00d) New New New ds_transition to WaitActive
232192023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
232202023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session c6dc54f3-5d65-4987-826a-9776dd72d00d
232212023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [1] d17b320f-b32d-4e32-93ab-fda757545dc7 (c6dc54f3-5d65-4987-826a-9776dd72d00d) WaitActive New New ds_transition to WaitActive
232222023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
232232023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session c6dc54f3-5d65-4987-826a-9776dd72d00d
23224 Sep 22 23:21:38.246 INFO UpstairsConnection { upstairs_id: d17b320f-b32d-4e32-93ab-fda757545dc7, session_id: c6dc54f3-5d65-4987-826a-9776dd72d00d, gen: 1 } is now active (read-write)
232252023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [2] d17b320f-b32d-4e32-93ab-fda757545dc7 (c6dc54f3-5d65-4987-826a-9776dd72d00d) WaitActive WaitActive New ds_transition to WaitActive
232262023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
232272023-09-22T23:21:38.246ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session c6dc54f3-5d65-4987-826a-9776dd72d00d
23228 Sep 22 23:21:38.246 INFO UpstairsConnection { upstairs_id: d17b320f-b32d-4e32-93ab-fda757545dc7, session_id: c6dc54f3-5d65-4987-826a-9776dd72d00d, gen: 1 } is now active (read-write)
23229 Sep 22 23:21:38.247 INFO UpstairsConnection { upstairs_id: d17b320f-b32d-4e32-93ab-fda757545dc7, session_id: c6dc54f3-5d65-4987-826a-9776dd72d00d, gen: 1 } is now active (read-write)
23230 test test::integration_test_volume_write_unwritten_subvols ... ok
23231 Sep 22 23:21:38.247 INFO current number of open files limit 65536 is already the maximum
23232 Sep 22 23:21:38.247 DEBG [0] Read AckReady 1003, : downstairs
23233 Sep 22 23:21:38.247 INFO Created new region file "/tmp/downstairs-lRuGJNxo/region.json"
232342023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:39369 has UUID 07ed70f5-1d11-4ea6-9c11-5ff16ec9fcb3
232352023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 07ed70f5-1d11-4ea6-9c11-5ff16ec9fcb3, encrypted: true, database_read_version: 1, database_write_version: 1 }
232362023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 WaitActive WaitActive WaitActive
232372023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:55268 has UUID fbc1ff7a-5e2f-4a0a-a554-498b43e005a3
232382023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fbc1ff7a-5e2f-4a0a-a554-498b43e005a3, encrypted: true, database_read_version: 1, database_write_version: 1 }
232392023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 WaitActive WaitActive WaitActive
232402023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:37306 has UUID 89e24c32-e4e6-442a-a2a4-07d6620fd2c7
232412023-09-22T23:21:38.247ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 89e24c32-e4e6-442a-a2a4-07d6620fd2c7, encrypted: true, database_read_version: 1, database_write_version: 1 }
232422023-09-22T23:21:38.248ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 WaitActive WaitActive WaitActive
23243 Sep 22 23:21:38.248 INFO Current flush_numbers [0..12]: [0, 0]
23244 Sep 22 23:21:38.248 INFO Downstairs has completed Negotiation, task: proc
23245 Sep 22 23:21:38.248 INFO Current flush_numbers [0..12]: [0, 0]
23246 Sep 22 23:21:38.248 INFO Downstairs has completed Negotiation, task: proc
23247 Sep 22 23:21:38.249 INFO Current flush_numbers [0..12]: [0, 0]
23248 Sep 22 23:21:38.249 DEBG [1] Read already AckReady 1003, : downstairs
23249 Sep 22 23:21:38.249 INFO Downstairs has completed Negotiation, task: proc
232502023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0] d17b320f-b32d-4e32-93ab-fda757545dc7 (c6dc54f3-5d65-4987-826a-9776dd72d00d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
232512023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
232522023-09-22T23:21:38.250ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
232532023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
232542023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [1] d17b320f-b32d-4e32-93ab-fda757545dc7 (c6dc54f3-5d65-4987-826a-9776dd72d00d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
232552023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
232562023-09-22T23:21:38.250ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
232572023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
232582023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [2] d17b320f-b32d-4e32-93ab-fda757545dc7 (c6dc54f3-5d65-4987-826a-9776dd72d00d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
23259 Sep 22 23:21:38.250 INFO current number of open files limit 65536 is already the maximum
232602023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
23261 Sep 22 23:21:38.250 INFO Opened existing region file "/tmp/downstairs-lRuGJNxo/region.json"
23262 Sep 22 23:21:38.250 INFO Database read version 1
232632023-09-22T23:21:38.250ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
23264 The guest has finished waiting for activation
232652023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
232662023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:39369 task reports connection:true
23267 Sep 22 23:21:38.250 INFO Database write version 1
232682023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 WaitQuorum WaitQuorum WaitQuorum
232692023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
232702023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
232712023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
232722023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
232732023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
232742023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
232752023-09-22T23:21:38.250ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
232762023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
232772023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
232782023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Max found gen is 1
232792023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
232802023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Next flush: 1
232812023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): All extents match
23282 Sep 22 23:21:38.251 DEBG [2] Read already AckReady 1003, : downstairs
232832023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): No downstairs repair required
232842023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): No initial repair work was required
23285 Sep 22 23:21:38.251 DEBG up_ds_listen was notified
23286 Sep 22 23:21:38.251 DEBG up_ds_listen process 1003
232872023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
23288 Sep 22 23:21:38.251 DEBG [A] ack job 1003:4, : downstairs
232892023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 is now active with session: c6dc54f3-5d65-4987-826a-9776dd72d00d
232902023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 Set Active after no repair
232912023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
232922023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Set check for repair
232932023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:55268 task reports connection:true
232942023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 Active Active Active
232952023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Set check for repair
232962023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:37306 task reports connection:true
23297 Sep 22 23:21:38.251 DEBG up_ds_listen checked 1 jobs, back to waiting
232982023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 Active Active Active
232992023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): Set check for repair
233002023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [0] received reconcile message
23301 Sep 22 23:21:38.251 INFO UUID: c25278bf-5b9e-4380-abf6-dfd9701dce27
23302 Sep 22 23:21:38.251 INFO Blocks per extent:5 Total Extents: 2
233032023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
233042023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
23305 Sep 22 23:21:38.251 INFO Crucible Version: Crucible Version: 0.0.1
23306 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23307 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23308 rustc: 1.70.0 stable x86_64-unknown-illumos
23309 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23310 Sep 22 23:21:38.251 INFO Upstairs <-> Downstairs Message Version: 4, task: main
233112023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [1] received reconcile message
23312 Sep 22 23:21:38.251 INFO Using address: 127.0.0.1:33527, task: main
233132023-09-22T23:21:38.251ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
233142023-09-22T23:21:38.252ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
233152023-09-22T23:21:38.252ZINFOcrucible-pantry (datafile): [2] received reconcile message
233162023-09-22T23:21:38.252ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
233172023-09-22T23:21:38.252ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
233182023-09-22T23:21:38.252ZINFOcrucible-pantry (datafile): volume 16548ffc-77b2-48b7-a37b-cfc57a4b3680 activated ok
233192023-09-22T23:21:38.252ZINFOcrucible-pantry (datafile): volume 16548ffc-77b2-48b7-a37b-cfc57a4b3680 constructed and inserted ok
233202023-09-22T23:21:38.252ZINFOcrucible-pantry (dropshot): request completed latency_us = 9256 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = be303c4e-f00d-4251-8e01-091f9f93f1f2 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680
23321 Sep 22 23:21:38.252 INFO Repair listens on 127.0.0.1:0, task: repair
23322 Sep 22 23:21:38.252 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58793, task: repair
23323 Sep 22 23:21:38.252 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58793, task: repair
23324 Sep 22 23:21:38.252 INFO listening, local_addr: 127.0.0.1:58793, task: repair
23325 Sep 22 23:21:38.252 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58793, task: repair
23326 Sep 22 23:21:38.252 INFO Using repair address: 127.0.0.1:58793, task: main
23327 Sep 22 23:21:38.252 INFO No SSL acceptor configured, task: main
23328 Sep 22 23:21:38.253 INFO current number of open files limit 65536 is already the maximum
23329 Sep 22 23:21:38.253 INFO Created new region file "/tmp/downstairs-ZkSPwZxy/region.json"
233302023-09-22T23:21:38.253ZINFOcrucible-pantry (dropshot): request completed latency_us = 1587 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 56b4dd93-c533-4f0b-8c7b-8238348be4a1 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23331 Sep 22 23:21:38.255 DEBG Write :1000 deps:[] res:true
23332 Sep 22 23:21:38.255 DEBG Write :1000 deps:[] res:true
23333 Sep 22 23:21:38.256 DEBG Write :1000 deps:[] res:true
233342023-09-22T23:21:38.258ZINFOcrucible-pantry (dropshot): request completed latency_us = 1360 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = cd39d46d-e128-42ea-8288-99336e3a80ef response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23335 Sep 22 23:21:38.258 INFO current number of open files limit 65536 is already the maximum
23336 Sep 22 23:21:38.258 INFO Opened existing region file "/tmp/downstairs-ZkSPwZxy/region.json"
23337 Sep 22 23:21:38.258 INFO Database read version 1
23338 Sep 22 23:21:38.259 INFO Database write version 1
23339 Sep 22 23:21:38.259 DEBG Write :1001 deps:[] res:true
23340 test test::integration_test_volume_write_unwritten_subvols_sparse ... ok
23341 Sep 22 23:21:38.259 DEBG Write :1001 deps:[] res:true
23342 Sep 22 23:21:38.259 INFO current number of open files limit 65536 is already the maximum
23343 Sep 22 23:21:38.259 DEBG Write :1001 deps:[] res:true
23344 Sep 22 23:21:38.259 INFO Created new region file "/tmp/downstairs-9KQd6X6V/region.json"
23345 Sep 22 23:21:38.261 INFO UUID: 99808fa8-ed0d-4880-abf3-c77623f8cd97
23346 Sep 22 23:21:38.261 INFO Blocks per extent:5 Total Extents: 2
23347 Sep 22 23:21:38.261 INFO Crucible Version: Crucible Version: 0.0.1
23348 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23349 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23350 rustc: 1.70.0 stable x86_64-unknown-illumos
23351 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23352 Sep 22 23:21:38.261 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23353 Sep 22 23:21:38.261 INFO Using address: 127.0.0.1:59188, task: main
23354 Sep 22 23:21:38.261 INFO Repair listens on 127.0.0.1:0, task: repair
233552023-09-22T23:21:38.261ZINFOcrucible-pantry (dropshot): request completed latency_us = 1323 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 451ed998-29e4-43cb-9662-7473e4fde8da response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23356 Sep 22 23:21:38.261 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33494, task: repair
23357 Sep 22 23:21:38.261 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33494, task: repair
23358 Sep 22 23:21:38.261 INFO listening, local_addr: 127.0.0.1:33494, task: repair
23359 Sep 22 23:21:38.261 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33494, task: repair
23360 Sep 22 23:21:38.262 INFO Using repair address: 127.0.0.1:33494, task: main
23361 Sep 22 23:21:38.262 INFO No SSL acceptor configured, task: main
23362 Sep 22 23:21:38.262 INFO current number of open files limit 65536 is already the maximum
23363 Sep 22 23:21:38.262 DEBG Write :1002 deps:[] res:true
23364 Sep 22 23:21:38.262 INFO Created new region file "/tmp/downstairs-lX8N7uaO/region.json"
23365 Sep 22 23:21:38.263 DEBG Write :1002 deps:[] res:true
23366 Sep 22 23:21:38.263 DEBG Write :1002 deps:[] res:true
233672023-09-22T23:21:38.265ZINFOcrucible-pantry (dropshot): request completed latency_us = 1449 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 7855f821-41fa-4f86-a042-757c2b9ce7b4 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23368 Sep 22 23:21:38.266 DEBG Write :1003 deps:[] res:true
23369 Sep 22 23:21:38.266 DEBG Write :1003 deps:[] res:true
23370 Sep 22 23:21:38.266 DEBG Write :1003 deps:[] res:true
23371 Sep 22 23:21:38.268 INFO current number of open files limit 65536 is already the maximum
23372 Sep 22 23:21:38.268 INFO Opened existing region file "/tmp/downstairs-lX8N7uaO/region.json"
23373 Sep 22 23:21:38.268 INFO Database read version 1
23374 Sep 22 23:21:38.268 INFO Database write version 1
233752023-09-22T23:21:38.268ZINFOcrucible-pantry (dropshot): request completed latency_us = 1153 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 3b06b483-774a-4581-b001-3f530562d342 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23376 Sep 22 23:21:38.269 DEBG Write :1004 deps:[] res:true
23377 Sep 22 23:21:38.269 DEBG Write :1004 deps:[] res:true
23378 Sep 22 23:21:38.270 DEBG Write :1004 deps:[] res:true
23379 Sep 22 23:21:38.272 INFO UUID: 651c2c7a-b2ed-48c3-b57d-44ab91a6f440
23380 Sep 22 23:21:38.272 INFO Blocks per extent:5 Total Extents: 2
233812023-09-22T23:21:38.272ZINFOcrucible-pantry (dropshot): request completed latency_us = 1341 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = b2d950b3-9eee-4cba-80c0-bff75bd19d02 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23382 Sep 22 23:21:38.272 INFO Crucible Version: Crucible Version: 0.0.1
23383 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23384 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23385 rustc: 1.70.0 stable x86_64-unknown-illumos
23386 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23387 Sep 22 23:21:38.272 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23388 Sep 22 23:21:38.272 INFO Using address: 127.0.0.1:54900, task: main
23389 Sep 22 23:21:38.272 INFO Repair listens on 127.0.0.1:0, task: repair
23390 Sep 22 23:21:38.272 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35114, task: repair
23391 Sep 22 23:21:38.272 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35114, task: repair
23392 Sep 22 23:21:38.272 INFO listening, local_addr: 127.0.0.1:35114, task: repair
23393 Sep 22 23:21:38.272 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35114, task: repair
23394 Sep 22 23:21:38.272 INFO Using repair address: 127.0.0.1:35114, task: main
23395 Sep 22 23:21:38.272 INFO No SSL acceptor configured, task: main
23396 Sep 22 23:21:38.273 DEBG Write :1005 deps:[] res:true
23397 note: configured to log to "/dev/stdout"
23398 Sep 22 23:21:38.273 DEBG Write :1005 deps:[] res:true
23399 Sep 22 23:21:38.274 DEBG Write :1005 deps:[] res:true
234002023-09-22T23:21:38.274ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:55883
234012023-09-22T23:21:38.274ZINFOcrucible-pantry: listen IP: 127.0.0.1:55883
234022023-09-22T23:21:38.276ZINFOcrucible-pantry (dropshot): request completed latency_us = 1451 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 63600948-3668-44fc-8dc9-d0c64f1a5c93 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23403 Sep 22 23:21:38.277 DEBG Write :1006 deps:[] res:true
23404 Sep 22 23:21:38.277 DEBG Write :1006 deps:[] res:true
23405 Sep 22 23:21:38.277 DEBG Write :1006 deps:[] res:true
234062023-09-22T23:21:38.280ZINFOcrucible-pantry (dropshot): request completed latency_us = 1436 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = be829a86-e982-4672-8966-74ca3433b266 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23407 Sep 22 23:21:38.280 DEBG Write :1007 deps:[] res:true
23408 Sep 22 23:21:38.280 DEBG Write :1007 deps:[] res:true
23409 Sep 22 23:21:38.281 DEBG Write :1007 deps:[] res:true
234102023-09-22T23:21:38.283ZINFOcrucible-pantry (dropshot): request completed latency_us = 1417 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 3d4340b2-0490-4162-a3ff-806301c41c2b response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23411 Sep 22 23:21:38.284 DEBG Write :1008 deps:[] res:true
23412 Sep 22 23:21:38.284 DEBG Write :1008 deps:[] res:true
23413 Sep 22 23:21:38.284 DEBG Write :1008 deps:[] res:true
234142023-09-22T23:21:38.286ZINFOcrucible-pantry (dropshot): request completed latency_us = 1375 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = a1ea2512-259c-446d-8e01-3bb394cc5bb6 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_write
23415 Sep 22 23:21:38.287 DEBG Write :1009 deps:[] res:true
23416 Sep 22 23:21:38.288 DEBG Write :1009 deps:[] res:true
23417 Sep 22 23:21:38.288 DEBG Write :1009 deps:[] res:true
23418 Sep 22 23:21:38.290 DEBG Read :1010 deps:[JobId(1000)] res:true
23419 Sep 22 23:21:38.291 DEBG Read :1010 deps:[JobId(1000)] res:true
23420 Sep 22 23:21:38.291 DEBG Read :1010 deps:[JobId(1000)] res:true
234212023-09-22T23:21:38.293ZINFOcrucible-pantry (dropshot): request completed latency_us = 3911 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = b5d5c091-ed3a-47ee-b6c8-ded69fbb01c1 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23422 Sep 22 23:21:38.294 DEBG Read :1011 deps:[JobId(1001)] res:true
23423 Sep 22 23:21:38.295 DEBG Read :1011 deps:[JobId(1001)] res:true
23424 Sep 22 23:21:38.295 DEBG Read :1011 deps:[JobId(1001)] res:true
234252023-09-22T23:21:38.296ZINFOcrucible-pantry (dropshot): request completed latency_us = 2562 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 52c2828e-9b2a-487b-96ce-4ded00679350 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23426 Sep 22 23:21:38.298 DEBG Read :1012 deps:[JobId(1002)] res:true
23427 Sep 22 23:21:38.298 DEBG Read :1012 deps:[JobId(1002)] res:true
23428 Sep 22 23:21:38.298 DEBG Read :1012 deps:[JobId(1002)] res:true
234292023-09-22T23:21:38.299ZINFOcrucible-pantry (dropshot): request completed latency_us = 2530 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 0b94ac49-1f6c-4b0e-aa0c-67cef59b9be8 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23430 Sep 22 23:21:38.301 DEBG Read :1013 deps:[JobId(1003)] res:true
23431 Sep 22 23:21:38.301 DEBG Read :1013 deps:[JobId(1003)] res:true
23432 Sep 22 23:21:38.302 DEBG Read :1013 deps:[JobId(1003)] res:true
234332023-09-22T23:21:38.303ZINFOcrucible-pantry (dropshot): request completed latency_us = 3511 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = a87c80a2-e124-422a-955a-81caa2c37315 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23434 Sep 22 23:21:38.305 DEBG Read :1014 deps:[JobId(1004)] res:true
23435 Sep 22 23:21:38.306 DEBG Read :1014 deps:[JobId(1004)] res:true
23436 Sep 22 23:21:38.306 DEBG Read :1014 deps:[JobId(1004)] res:true
234372023-09-22T23:21:38.308ZINFOcrucible-pantry (dropshot): request completed latency_us = 3400 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 269ff15b-d9c8-451c-915a-4fd1718d5b5a response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23438 Sep 22 23:21:38.310 DEBG Read :1015 deps:[JobId(1005)] res:true
23439 Sep 22 23:21:38.310 DEBG Read :1015 deps:[JobId(1005)] res:true
23440 Sep 22 23:21:38.310 DEBG Read :1015 deps:[JobId(1005)] res:true
234412023-09-22T23:21:38.312ZINFOcrucible-pantry (dropshot): request completed latency_us = 3600 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = aa7a1fc7-c999-4c03-b17e-4d27a531a8dd response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23442 Sep 22 23:21:38.314 DEBG Read :1016 deps:[JobId(1006)] res:true
23443 Sep 22 23:21:38.314 DEBG Read :1016 deps:[JobId(1006)] res:true
23444 Sep 22 23:21:38.314 DEBG Read :1016 deps:[JobId(1006)] res:true
234452023-09-22T23:21:38.316ZINFOcrucible-pantry (dropshot): request completed latency_us = 3260 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 62a9d1af-c42a-42d4-bf8a-80d715759cd9 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23446 Sep 22 23:21:38.318 DEBG Read :1017 deps:[JobId(1007)] res:true
23447 Sep 22 23:21:38.318 DEBG Read :1017 deps:[JobId(1007)] res:true
23448 Sep 22 23:21:38.318 DEBG Read :1017 deps:[JobId(1007)] res:true
234492023-09-22T23:21:38.319ZINFOcrucible-pantry (dropshot): request completed latency_us = 2854 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 0c974305-b9bc-4b44-a358-65991b73ac64 response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23450 Sep 22 23:21:38.321 DEBG Read :1018 deps:[JobId(1008)] res:true
23451 Sep 22 23:21:38.321 DEBG Read :1018 deps:[JobId(1008)] res:true
23452 Sep 22 23:21:38.321 DEBG Read :1018 deps:[JobId(1008)] res:true
234532023-09-22T23:21:38.323ZINFOcrucible-pantry (dropshot): request completed latency_us = 2524 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = b4b3215a-bb8a-4800-abac-5c796f2b91bb response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23454 Sep 22 23:21:38.324 DEBG Read :1019 deps:[JobId(1009)] res:true
23455 Sep 22 23:21:38.324 DEBG Read :1019 deps:[JobId(1009)] res:true
23456 Sep 22 23:21:38.324 DEBG Read :1019 deps:[JobId(1009)] res:true
234572023-09-22T23:21:38.326ZINFOcrucible-pantry (dropshot): request completed latency_us = 2609 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 73e0f1b6-e16d-4263-883f-2a541ea6960e response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
23458 Sep 22 23:21:38.328 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
23459 Sep 22 23:21:38.328 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
23460 Sep 22 23:21:38.329 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
234612023-09-22T23:21:38.338ZINFOcrucible-pantry (dropshot): request completed latency_us = 11073 local_addr = 127.0.0.1:51585 method = POST remote_addr = 127.0.0.1:63359 req_id = 8905ea72-8884-4a07-9fa7-7341d81decfa response_code = 200 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680/bulk_read
234622023-09-22T23:21:38.338ZINFOcrucible-pantry (datafile): detach removing entry for volume 16548ffc-77b2-48b7-a37b-cfc57a4b3680
234632023-09-22T23:21:38.338ZINFOcrucible-pantry (datafile): detaching volume 16548ffc-77b2-48b7-a37b-cfc57a4b3680
23464 Sep 22 23:21:38.340 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23465 Sep 22 23:21:38.340 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23466 Sep 22 23:21:38.340 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
234672023-09-22T23:21:38.341ZINFOcrucible-pantry (datafile): Request to deactivate this guest
234682023-09-22T23:21:38.341ZINFOcrucible-pantry (datafile): d17b320f-b32d-4e32-93ab-fda757545dc7 set deactivating.
234692023-09-22T23:21:38.341ZINFOcrucible-pantry (dropshot): request completed latency_us = 3063 local_addr = 127.0.0.1:51585 method = DELETE remote_addr = 127.0.0.1:63359 req_id = eaec3b2c-ced2-4377-aebc-b28f63a247a2 response_code = 204 uri = /crucible/pantry/0/volume/16548ffc-77b2-48b7-a37b-cfc57a4b3680
23470 test test::test_pantry_bulk_read ... ok
23471 Sep 22 23:21:38.350 INFO current number of open files limit 65536 is already the maximum
23472 Sep 22 23:21:38.350 INFO Created new region file "/tmp/downstairs-BqJQGXV6/region.json"
23473 Sep 22 23:21:38.355 INFO current number of open files limit 65536 is already the maximum
23474 Sep 22 23:21:38.356 INFO Opened existing region file "/tmp/downstairs-BqJQGXV6/region.json"
23475 Sep 22 23:21:38.356 INFO Database read version 1
23476 Sep 22 23:21:38.356 INFO Database write version 1
23477 Sep 22 23:21:38.359 INFO UUID: ad667607-b05e-4427-9709-433732d1d5ab
23478 Sep 22 23:21:38.359 INFO Blocks per extent:5 Total Extents: 2
23479 Sep 22 23:21:38.359 INFO Crucible Version: Crucible Version: 0.0.1
23480 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23481 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23482 rustc: 1.70.0 stable x86_64-unknown-illumos
23483 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23484 Sep 22 23:21:38.359 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23485 Sep 22 23:21:38.359 INFO Using address: 127.0.0.1:65363, task: main
23486 Sep 22 23:21:38.359 INFO Repair listens on 127.0.0.1:0, task: repair
23487 Sep 22 23:21:38.359 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57934, task: repair
23488 Sep 22 23:21:38.359 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57934, task: repair
23489 Sep 22 23:21:38.359 INFO listening, local_addr: 127.0.0.1:57934, task: repair
23490 Sep 22 23:21:38.360 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57934, task: repair
23491 Sep 22 23:21:38.360 INFO Using repair address: 127.0.0.1:57934, task: main
23492 Sep 22 23:21:38.360 INFO No SSL acceptor configured, task: main
23493 Sep 22 23:21:38.360 INFO current number of open files limit 65536 is already the maximum
23494 Sep 22 23:21:38.360 INFO Created new region file "/tmp/downstairs-BpCJ2qOn/region.json"
23495 Sep 22 23:21:38.366 INFO current number of open files limit 65536 is already the maximum
23496 Sep 22 23:21:38.367 INFO Opened existing region file "/tmp/downstairs-BpCJ2qOn/region.json"
23497 Sep 22 23:21:38.367 INFO Database read version 1
23498 Sep 22 23:21:38.367 INFO Database write version 1
23499 Sep 22 23:21:38.369 INFO UUID: 995b5b97-67dd-43f0-b20a-d5b2c0c00924
23500 Sep 22 23:21:38.369 INFO Blocks per extent:5 Total Extents: 2
23501 Sep 22 23:21:38.369 INFO Crucible Version: Crucible Version: 0.0.1
23502 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23503 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23504 rustc: 1.70.0 stable x86_64-unknown-illumos
23505 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23506 Sep 22 23:21:38.369 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23507 Sep 22 23:21:38.369 INFO Using address: 127.0.0.1:63095, task: main
23508 Sep 22 23:21:38.369 INFO Repair listens on 127.0.0.1:0, task: repair
23509 Sep 22 23:21:38.370 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37590, task: repair
23510 Sep 22 23:21:38.370 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37590, task: repair
23511 Sep 22 23:21:38.370 INFO listening, local_addr: 127.0.0.1:37590, task: repair
23512 Sep 22 23:21:38.370 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37590, task: repair
23513 Sep 22 23:21:38.370 INFO Using repair address: 127.0.0.1:37590, task: main
23514 Sep 22 23:21:38.370 INFO No SSL acceptor configured, task: main
23515 Sep 22 23:21:38.370 INFO current number of open files limit 65536 is already the maximum
23516 Sep 22 23:21:38.370 INFO Created new region file "/tmp/downstairs-iM5wyDOR/region.json"
23517 Sep 22 23:21:38.376 INFO listening on 127.0.0.1:0, task: main
23518 Sep 22 23:21:38.376 INFO listening on 127.0.0.1:0, task: main
23519 Sep 22 23:21:38.376 INFO listening on 127.0.0.1:0, task: main
23520 Sep 22 23:21:38.376 INFO current number of open files limit 65536 is already the maximum
23521 Sep 22 23:21:38.376 INFO Opened existing region file "/tmp/downstairs-iM5wyDOR/region.json"
23522 Sep 22 23:21:38.376 INFO Database read version 1
23523 Sep 22 23:21:38.376 INFO Database write version 1
235242023-09-22T23:21:38.376ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:55883 remote_addr = 127.0.0.1:43786
235252023-09-22T23:21:38.377ZINFOcrucible-pantry (datafile): no entry exists for volume 17fca985-4bd4-4289-96a4-634e1d483fcb, constructing...
235262023-09-22T23:21:38.377ZINFOcrucible-pantry (datafile): Upstairs starts
235272023-09-22T23:21:38.377ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
235282023-09-22T23:21:38.377ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
235292023-09-22T23:21:38.377ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: e5b4ada7-7239-4d69-8861-23d72de6320f
235302023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): Crucible e5b4ada7-7239-4d69-8861-23d72de6320f has session id: 53f0eb62-df50-4075-93bc-d1b8847e7671
235312023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:33527 looper = 0
235322023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:59188 looper = 1
235332023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:54900 looper = 2
235342023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
235352023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
235362023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
235372023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): volume 17fca985-4bd4-4289-96a4-634e1d483fcb constructed ok
23538 The guest has requested activation
235392023-09-22T23:21:38.378ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f active request set
23540 Sep 22 23:21:38.378 INFO accepted connection from 127.0.0.1:55430, task: main
23541 Sep 22 23:21:38.379 INFO accepted connection from 127.0.0.1:50291, task: main
23542 Sep 22 23:21:38.379 INFO accepted connection from 127.0.0.1:55513, task: main
235432023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f looper connected looper = 1
235442023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:59188 in state New
235452023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f looper connected looper = 2
235462023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:54900 in state New
235472023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f looper connected looper = 0
235482023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:33527 in state New
23549 Sep 22 23:21:38.379 INFO Connection request from e5b4ada7-7239-4d69-8861-23d72de6320f with version 4, task: proc
23550 Sep 22 23:21:38.379 INFO upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } connected, version 4, task: proc
23551 Sep 22 23:21:38.379 INFO Connection request from e5b4ada7-7239-4d69-8861-23d72de6320f with version 4, task: proc
23552 Sep 22 23:21:38.379 INFO upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } connected, version 4, task: proc
23553 Sep 22 23:21:38.379 INFO Connection request from e5b4ada7-7239-4d69-8861-23d72de6320f with version 4, task: proc
23554 Sep 22 23:21:38.379 INFO upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } connected, version 4, task: proc
235552023-09-22T23:21:38.379ZINFOcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) New New New ds_transition to WaitActive
235562023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
235572023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 22467938-6385-426f-86c0-fa56c94d98b1
235582023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) New WaitActive New ds_transition to WaitActive
235592023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
235602023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 22467938-6385-426f-86c0-fa56c94d98b1
235612023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) New WaitActive WaitActive ds_transition to WaitActive
23562 Sep 22 23:21:38.380 INFO UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } is now active (read-write)
235632023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
23564 Sep 22 23:21:38.380 INFO UUID: f0f4ef77-64c9-4cae-ad93-ac2fb4943d50
23565 Sep 22 23:21:38.380 INFO Blocks per extent:5 Total Extents: 2
235662023-09-22T23:21:38.380ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 22467938-6385-426f-86c0-fa56c94d98b1
23567 Sep 22 23:21:38.380 INFO UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } is now active (read-write)
23568 Sep 22 23:21:38.380 INFO Crucible Version: Crucible Version: 0.0.1
23569 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23570 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23571 rustc: 1.70.0 stable x86_64-unknown-illumos
23572 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23573 Sep 22 23:21:38.380 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23574 Sep 22 23:21:38.380 INFO Using address: 127.0.0.1:64907, task: main
23575 Sep 22 23:21:38.380 INFO UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } is now active (read-write)
23576 Sep 22 23:21:38.380 INFO Repair listens on 127.0.0.1:0, task: repair
23577 Sep 22 23:21:38.380 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:34369, task: repair
23578 Sep 22 23:21:38.380 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:34369, task: repair
23579 Sep 22 23:21:38.380 INFO listening, local_addr: 127.0.0.1:34369, task: repair
235802023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:59188 has UUID 99808fa8-ed0d-4880-abf3-c77623f8cd97
23581 Sep 22 23:21:38.381 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:34369, task: repair
23582 Sep 22 23:21:38.381 INFO Using repair address: 127.0.0.1:34369, task: main
23583 Sep 22 23:21:38.381 INFO No SSL acceptor configured, task: main
235842023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 99808fa8-ed0d-4880-abf3-c77623f8cd97, encrypted: true, database_read_version: 1, database_write_version: 1 }
235852023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f WaitActive WaitActive WaitActive
235862023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:54900 has UUID 651c2c7a-b2ed-48c3-b57d-44ab91a6f440
235872023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 651c2c7a-b2ed-48c3-b57d-44ab91a6f440, encrypted: true, database_read_version: 1, database_write_version: 1 }
235882023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f WaitActive WaitActive WaitActive
235892023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:33527 has UUID c25278bf-5b9e-4380-abf6-dfd9701dce27
235902023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c25278bf-5b9e-4380-abf6-dfd9701dce27, encrypted: true, database_read_version: 1, database_write_version: 1 }
235912023-09-22T23:21:38.381ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f WaitActive WaitActive WaitActive
23592 Sep 22 23:21:38.381 INFO Current flush_numbers [0..12]: [0, 0]
23593 Sep 22 23:21:38.381 INFO Downstairs has completed Negotiation, task: proc
23594 Sep 22 23:21:38.381 INFO Upstairs starts
23595 Sep 22 23:21:38.381 INFO Crucible Version: BuildInfo {
23596 version: "0.0.1",
23597 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
23598 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
23599 git_branch: "main",
23600 rustc_semver: "1.70.0",
23601 rustc_channel: "stable",
23602 rustc_host_triple: "x86_64-unknown-illumos",
23603 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
23604 cargo_triple: "x86_64-unknown-illumos",
23605 debug: true,
23606 opt_level: 0,
23607 }
23608 Sep 22 23:21:38.381 INFO Upstairs <-> Downstairs Message Version: 4
23609 Sep 22 23:21:38.381 INFO Crucible stats registered with UUID: f5cf38e1-07a4-4403-b783-c67b3e5d3b37
23610 Sep 22 23:21:38.381 INFO Crucible f5cf38e1-07a4-4403-b783-c67b3e5d3b37 has session id: 0e0eeacf-f1e3-4f53-b5ee-c5a33ce2f2b6
23611 Sep 22 23:21:38.381 INFO Current flush_numbers [0..12]: [0, 0]
23612 Sep 22 23:21:38.381 INFO listening on 127.0.0.1:0, task: main
23613 Sep 22 23:21:38.381 INFO listening on 127.0.0.1:0, task: main
23614 Sep 22 23:21:38.382 INFO listening on 127.0.0.1:0, task: main
23615 Sep 22 23:21:38.382 INFO Downstairs has completed Negotiation, task: proc
23616 Sep 22 23:21:38.382 INFO [0] connecting to 127.0.0.1:65363, looper: 0
23617 Sep 22 23:21:38.382 INFO [1] connecting to 127.0.0.1:63095, looper: 1
23618 Sep 22 23:21:38.382 INFO [2] connecting to 127.0.0.1:64907, looper: 2
23619 Sep 22 23:21:38.382 INFO up_listen starts, task: up_listen
23620 Sep 22 23:21:38.382 INFO Wait for all three downstairs to come online
23621 Sep 22 23:21:38.382 INFO Flush timeout: 0.5
23622 Sep 22 23:21:38.382 INFO Current flush_numbers [0..12]: [0, 0]
23623 Sep 22 23:21:38.382 INFO accepted connection from 127.0.0.1:46576, task: main
23624 Sep 22 23:21:38.382 INFO accepted connection from 127.0.0.1:61396, task: main
23625 Sep 22 23:21:38.382 INFO accepted connection from 127.0.0.1:56109, task: main
23626 Sep 22 23:21:38.382 INFO Downstairs has completed Negotiation, task: proc
23627 Sep 22 23:21:38.382 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected, looper: 0
23628 Sep 22 23:21:38.382 INFO [0] Proc runs for 127.0.0.1:65363 in state New
23629 Sep 22 23:21:38.382 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected, looper: 1
23630 Sep 22 23:21:38.382 INFO [1] Proc runs for 127.0.0.1:63095 in state New
23631 Sep 22 23:21:38.382 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected, looper: 2
23632 Sep 22 23:21:38.382 INFO [2] Proc runs for 127.0.0.1:64907 in state New
236332023-09-22T23:21:38.382ZINFOcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
236342023-09-22T23:21:38.382ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
236352023-09-22T23:21:38.383ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
236362023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
236372023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
236382023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
23639 Sep 22 23:21:38.383 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
23640 Sep 22 23:21:38.383 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } connected, version 4, task: proc
236412023-09-22T23:21:38.383ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
236422023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
23643 The guest has finished waiting for activation
23644 Sep 22 23:21:38.383 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
23645 Sep 22 23:21:38.383 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } connected, version 4, task: proc
236462023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) WaitActive WaitQuorum WaitQuorum ds_transition to WaitQuorum
236472023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
236482023-09-22T23:21:38.383ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
23649 Sep 22 23:21:38.383 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
236502023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
23651 Sep 22 23:21:38.383 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } connected, version 4, task: proc
236522023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:59188 task reports connection:true
236532023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f WaitQuorum WaitQuorum WaitQuorum
236542023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
236552023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
236562023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
236572023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
23658 Sep 22 23:21:38.383 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) New New New ds_transition to WaitActive
236592023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
23660 Sep 22 23:21:38.383 INFO [0] Transition from New to WaitActive
236612023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
236622023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
23663 Sep 22 23:21:38.383 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) WaitActive New New ds_transition to WaitActive
236642023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
23665 Sep 22 23:21:38.383 INFO [1] Transition from New to WaitActive
236662023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
236672023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): Max found gen is 1
23668 Sep 22 23:21:38.383 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) WaitActive WaitActive New ds_transition to WaitActive
236692023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
23670 Sep 22 23:21:38.383 INFO [2] Transition from New to WaitActive
23671 Sep 22 23:21:38.383 INFO UUID: 63b48fd3-ffb2-4c20-bff7-4a64980fdfc8
236722023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): Next flush: 1
23673 Sep 22 23:21:38.383 INFO Blocks per extent:512 Total Extents: 188
236742023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): All extents match
236752023-09-22T23:21:38.383ZINFOcrucible-pantry (datafile): No downstairs repair required
23676 The guest has requested activation
23677 Sep 22 23:21:38.383 INFO Crucible Version: Crucible Version: 0.0.1
23678 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23679 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23680 rustc: 1.70.0 stable x86_64-unknown-illumos
23681 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
236822023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): No initial repair work was required
23683 Sep 22 23:21:38.384 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23684 Sep 22 23:21:38.384 INFO Using address: 127.0.0.1:52165, task: main
23685 Sep 22 23:21:38.384 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 active request set
236862023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
236872023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f is now active with session: 22467938-6385-426f-86c0-fa56c94d98b1
236882023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f Set Active after no repair
23689 Sep 22 23:21:38.384 INFO [0] received activate with gen 1
236902023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
23691 Sep 22 23:21:38.384 INFO [0] client got ds_active_rx, promote! session 02fbce88-f31f-4b2e-a8ed-d7530615b97b
236922023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): Set check for repair
236932023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:54900 task reports connection:true
236942023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f Active Active Active
23695 Sep 22 23:21:38.384 INFO [1] received activate with gen 1
23696 Sep 22 23:21:38.384 INFO [1] client got ds_active_rx, promote! session 02fbce88-f31f-4b2e-a8ed-d7530615b97b
236972023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): Set check for repair
236982023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:33527 task reports connection:true
236992023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f Active Active Active
23700 Sep 22 23:21:38.384 INFO [2] received activate with gen 1
237012023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): Set check for repair
23702 Sep 22 23:21:38.384 INFO [2] client got ds_active_rx, promote! session 02fbce88-f31f-4b2e-a8ed-d7530615b97b
237032023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [0] received reconcile message
237042023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
237052023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
237062023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [1] received reconcile message
237072023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
23708 Sep 22 23:21:38.384 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } is now active (read-write)
237092023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
237102023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [2] received reconcile message
237112023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
23712 Sep 22 23:21:38.384 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } is now active (read-write)
237132023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
237142023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): volume 17fca985-4bd4-4289-96a4-634e1d483fcb activated ok
237152023-09-22T23:21:38.384ZINFOcrucible-pantry (datafile): volume 17fca985-4bd4-4289-96a4-634e1d483fcb constructed and inserted ok
23716 Sep 22 23:21:38.384 INFO Repair listens on 127.0.0.1:0, task: repair
237172023-09-22T23:21:38.384ZINFOcrucible-pantry (dropshot): request completed latency_us = 6298 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = d8af2e67-68cb-43e5-8478-69f7b6054043 response_code = 200 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb
23718 Sep 22 23:21:38.384 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } is now active (read-write)
23719 Sep 22 23:21:38.384 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50383, task: repair
23720 Sep 22 23:21:38.384 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50383, task: repair
23721 Sep 22 23:21:38.384 INFO listening, local_addr: 127.0.0.1:50383, task: repair
23722 Sep 22 23:21:38.385 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50383, task: repair
23723 Sep 22 23:21:38.385 INFO Using repair address: 127.0.0.1:50383, task: main
23724 Sep 22 23:21:38.385 INFO No SSL acceptor configured, task: main
23725 Sep 22 23:21:38.385 INFO [0] downstairs client at 127.0.0.1:65363 has UUID ad667607-b05e-4427-9709-433732d1d5ab
23726 Sep 22 23:21:38.385 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ad667607-b05e-4427-9709-433732d1d5ab, encrypted: true, database_read_version: 1, database_write_version: 1 }
23727 Sep 22 23:21:38.385 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
23728 Sep 22 23:21:38.385 INFO [1] downstairs client at 127.0.0.1:63095 has UUID 995b5b97-67dd-43f0-b20a-d5b2c0c00924
237292023-09-22T23:21:38.385ZINFOcrucible-pantry (dropshot): request completed latency_us = 1059 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = 3ba8385d-17a3-4c27-ab54-6face2758159 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23730 Sep 22 23:21:38.385 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 995b5b97-67dd-43f0-b20a-d5b2c0c00924, encrypted: true, database_read_version: 1, database_write_version: 1 }
23731 Sep 22 23:21:38.385 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
23732 Sep 22 23:21:38.385 INFO [2] downstairs client at 127.0.0.1:64907 has UUID f0f4ef77-64c9-4cae-ad93-ac2fb4943d50
23733 Sep 22 23:21:38.385 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f0f4ef77-64c9-4cae-ad93-ac2fb4943d50, encrypted: true, database_read_version: 1, database_write_version: 1 }
23734 Sep 22 23:21:38.385 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
23735 Sep 22 23:21:38.385 INFO Current flush_numbers [0..12]: [0, 0]
23736 Sep 22 23:21:38.386 INFO Downstairs has completed Negotiation, task: proc
23737 Sep 22 23:21:38.386 DEBG Write :1000 deps:[] res:true
23738 Sep 22 23:21:38.386 INFO Current flush_numbers [0..12]: [0, 0]
23739 Sep 22 23:21:38.386 INFO Downstairs has completed Negotiation, task: proc
23740 Sep 22 23:21:38.386 INFO Current flush_numbers [0..12]: [0, 0]
23741 Sep 22 23:21:38.386 DEBG Write :1000 deps:[] res:true
23742 Sep 22 23:21:38.386 INFO Downstairs has completed Negotiation, task: proc
23743 Sep 22 23:21:38.387 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
23744 Sep 22 23:21:38.387 INFO [0] Transition from WaitActive to WaitQuorum
23745 Sep 22 23:21:38.387 WARN [0] new RM replaced this: None
23746 Sep 22 23:21:38.387 INFO [0] Starts reconcile loop
23747 Sep 22 23:21:38.387 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
23748 Sep 22 23:21:38.387 INFO [1] Transition from WaitActive to WaitQuorum
23749 Sep 22 23:21:38.387 WARN [1] new RM replaced this: None
23750 Sep 22 23:21:38.387 DEBG Write :1000 deps:[] res:true
23751 Sep 22 23:21:38.387 INFO [1] Starts reconcile loop
23752 Sep 22 23:21:38.387 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
23753 Sep 22 23:21:38.387 INFO [2] Transition from WaitActive to WaitQuorum
23754 Sep 22 23:21:38.387 WARN [2] new RM replaced this: None
23755 Sep 22 23:21:38.387 INFO [2] Starts reconcile loop
23756 Sep 22 23:21:38.387 INFO [0] 127.0.0.1:65363 task reports connection:true
23757 Sep 22 23:21:38.387 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitQuorum WaitQuorum WaitQuorum
23758 Sep 22 23:21:38.387 INFO [0]R flush_numbers: [0, 0]
23759 Sep 22 23:21:38.387 INFO [0]R generation: [0, 0]
23760 Sep 22 23:21:38.387 INFO [0]R dirty: [false, false]
23761 Sep 22 23:21:38.387 INFO [1]R flush_numbers: [0, 0]
23762 Sep 22 23:21:38.387 INFO [1]R generation: [0, 0]
23763 Sep 22 23:21:38.387 INFO [1]R dirty: [false, false]
23764 Sep 22 23:21:38.387 INFO [2]R flush_numbers: [0, 0]
23765 Sep 22 23:21:38.387 INFO [2]R generation: [0, 0]
23766 Sep 22 23:21:38.387 INFO [2]R dirty: [false, false]
23767 Sep 22 23:21:38.387 INFO Max found gen is 1
23768 Sep 22 23:21:38.387 INFO Generation requested: 1 >= found:1
23769 Sep 22 23:21:38.387 INFO Next flush: 1
23770 Sep 22 23:21:38.387 INFO All extents match
23771 Sep 22 23:21:38.387 INFO No downstairs repair required
23772 Sep 22 23:21:38.387 INFO No initial repair work was required
23773 Sep 22 23:21:38.387 INFO Set Downstairs and Upstairs active
23774 Sep 22 23:21:38.387 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 is now active with session: 02fbce88-f31f-4b2e-a8ed-d7530615b97b
23775 Sep 22 23:21:38.387 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Set Active after no repair
23776 Sep 22 23:21:38.387 INFO Notify all downstairs, region set compare is done.
23777 Sep 22 23:21:38.387 INFO Set check for repair
23778 Sep 22 23:21:38.387 INFO [1] 127.0.0.1:63095 task reports connection:true
23779 Sep 22 23:21:38.387 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Active Active Active
23780 Sep 22 23:21:38.387 INFO Set check for repair
23781 Sep 22 23:21:38.387 INFO [2] 127.0.0.1:64907 task reports connection:true
23782 Sep 22 23:21:38.387 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Active Active Active
23783 Sep 22 23:21:38.387 INFO Set check for repair
23784 Sep 22 23:21:38.387 INFO [0] received reconcile message
23785 Sep 22 23:21:38.387 INFO [0] All repairs completed, exit
23786 Sep 22 23:21:38.387 INFO [0] Starts cmd_loop
23787 Sep 22 23:21:38.387 INFO [1] received reconcile message
23788 Sep 22 23:21:38.387 INFO [1] All repairs completed, exit
23789 Sep 22 23:21:38.387 INFO [1] Starts cmd_loop
23790 Sep 22 23:21:38.387 INFO [2] received reconcile message
23791 Sep 22 23:21:38.387 INFO [2] All repairs completed, exit
23792 Sep 22 23:21:38.387 INFO [2] Starts cmd_loop
23793 The guest has finished waiting for activation
23794 Sep 22 23:21:38.388 DEBG IO Read 1000 has deps []
237952023-09-22T23:21:38.388ZINFOcrucible-pantry (dropshot): request completed latency_us = 1031 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = 4e784489-d034-4935-b724-d9e27228f651 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23796 Sep 22 23:21:38.389 DEBG Read :1000 deps:[] res:true
23797 Sep 22 23:21:38.389 DEBG Write :1001 deps:[] res:true
23798 Sep 22 23:21:38.389 DEBG Read :1000 deps:[] res:true
23799 Sep 22 23:21:38.389 DEBG Write :1001 deps:[] res:true
23800 Sep 22 23:21:38.390 DEBG Read :1000 deps:[] res:true
23801 Sep 22 23:21:38.390 DEBG Write :1001 deps:[] res:true
23802 Sep 22 23:21:38.391 DEBG [0] Read AckReady 1000, : downstairs
23803 Sep 22 23:21:38.391 DEBG [1] Read already AckReady 1000, : downstairs
23804 Sep 22 23:21:38.391 DEBG [2] Read already AckReady 1000, : downstairs
23805 Sep 22 23:21:38.391 DEBG up_ds_listen was notified
23806 Sep 22 23:21:38.391 DEBG up_ds_listen process 1000
23807 Sep 22 23:21:38.391 DEBG [A] ack job 1000:1, : downstairs
238082023-09-22T23:21:38.391ZINFOcrucible-pantry (dropshot): request completed latency_us = 1033 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = a3ee8070-9551-4a61-8484-5c0ff5f1e16c response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23809 Sep 22 23:21:38.392 DEBG up_ds_listen checked 1 jobs, back to waiting
23810 Sep 22 23:21:38.392 INFO Request to deactivate this guest
23811 Sep 22 23:21:38.392 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 set deactivating.
23812 Sep 22 23:21:38.392 DEBG IO Flush 1001 has deps [JobId(1000)]
23813 Sep 22 23:21:38.392 DEBG Write :1002 deps:[] res:true
23814 Sep 22 23:21:38.392 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23815 Sep 22 23:21:38.392 DEBG Write :1002 deps:[] res:true
23816 Sep 22 23:21:38.392 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23817 Sep 22 23:21:38.393 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23818 Sep 22 23:21:38.393 DEBG Write :1002 deps:[] res:true
23819 Sep 22 23:21:38.393 INFO [0] check deactivate YES
23820 Sep 22 23:21:38.393 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) Active Active Active ds_transition to Deactivated
23821 Sep 22 23:21:38.393 INFO [0] Transition from Active to Deactivated
23822 Sep 22 23:21:38.393 INFO [1] check deactivate YES
23823 Sep 22 23:21:38.393 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) Deactivated Active Active ds_transition to Deactivated
23824 Sep 22 23:21:38.393 INFO [1] Transition from Active to Deactivated
23825 Sep 22 23:21:38.393 DEBG [2] deactivate flush 1001 done, : downstairs
23826 Sep 22 23:21:38.393 INFO [2] check deactivate YES
23827 Sep 22 23:21:38.393 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (02fbce88-f31f-4b2e-a8ed-d7530615b97b) Deactivated Deactivated Active ds_transition to Deactivated
23828 Sep 22 23:21:38.393 INFO [2] Transition from Active to Deactivated
23829 Sep 22 23:21:38.393 ERRO 127.0.0.1:65363: proc: [0] client work task ended, Ok(Err([0] exits after deactivation)), so we end too, looper: 0
23830 Sep 22 23:21:38.393 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Gone missing, transition from Deactivated to New
23831 Sep 22 23:21:38.393 INFO deactivate transition checking...
23832 Sep 22 23:21:38.393 INFO deactivate_transition New Maybe
23833 Sep 22 23:21:38.393 INFO deactivate_transition Deactivated NO
23834 Sep 22 23:21:38.393 INFO deactivate_transition Deactivated NO
23835 Sep 22 23:21:38.393 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 connection to 127.0.0.1:65363 closed, looper: 0
23836 Sep 22 23:21:38.393 ERRO 127.0.0.1:63095: proc: [1] client work task ended, Ok(Err([1] exits after deactivation)), so we end too, looper: 1
23837 Sep 22 23:21:38.393 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Gone missing, transition from Deactivated to New
23838 Sep 22 23:21:38.393 INFO deactivate transition checking...
23839 Sep 22 23:21:38.393 INFO deactivate_transition New Maybe
23840 Sep 22 23:21:38.393 INFO deactivate_transition New Maybe
23841 Sep 22 23:21:38.393 INFO deactivate_transition Deactivated NO
23842 Sep 22 23:21:38.393 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 connection to 127.0.0.1:63095 closed, looper: 1
23843 Sep 22 23:21:38.394 DEBG up_ds_listen was notified
23844 Sep 22 23:21:38.394 DEBG up_ds_listen process 1001
23845 Sep 22 23:21:38.394 DEBG [A] ack job 1001:2, : downstairs
23846 Sep 22 23:21:38.394 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
23847 Sep 22 23:21:38.394 DEBG up_ds_listen checked 1 jobs, back to waiting
23848 Sep 22 23:21:38.394 ERRO 127.0.0.1:64907: proc: [2] client work task ended, Ok(Err([2] exits after deactivation)), so we end too, looper: 2
23849 Sep 22 23:21:38.394 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Gone missing, transition from Deactivated to New
23850 Sep 22 23:21:38.394 INFO deactivate transition checking...
23851 Sep 22 23:21:38.394 INFO deactivate_transition New Maybe
23852 Sep 22 23:21:38.394 INFO deactivate_transition New Maybe
23853 Sep 22 23:21:38.394 INFO deactivate_transition New Maybe
23854 Sep 22 23:21:38.394 INFO All DS in the proper state! -> INIT
23855 Sep 22 23:21:38.394 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 connection to 127.0.0.1:64907 closed, looper: 2
23856 Sep 22 23:21:38.394 INFO [0] 127.0.0.1:65363 task reports connection:false
23857 Sep 22 23:21:38.394 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 New New New
23858 Sep 22 23:21:38.394 INFO [0] 127.0.0.1:65363 task reports offline
23859 Sep 22 23:21:38.394 INFO [1] 127.0.0.1:63095 task reports connection:false
23860 Sep 22 23:21:38.394 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 New New New
23861 Sep 22 23:21:38.394 INFO [1] 127.0.0.1:63095 task reports offline
23862 Sep 22 23:21:38.394 INFO [2] 127.0.0.1:64907 task reports connection:false
23863 Sep 22 23:21:38.394 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 New New New
23864 Sep 22 23:21:38.394 INFO [2] 127.0.0.1:64907 task reports offline
23865 note: configured to log to "/dev/stdout"
238662023-09-22T23:21:38.394ZINFOcrucible-pantry (dropshot): request completed latency_us = 1110 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = e522cfdb-4708-4a23-858e-1d0d5f323626 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23867 Sep 22 23:21:38.395 DEBG Write :1003 deps:[] res:true
23868 Sep 22 23:21:38.395 DEBG Write :1003 deps:[] res:true
23869 Sep 22 23:21:38.396 DEBG Write :1003 deps:[] res:true
238702023-09-22T23:21:38.396ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:63903
238712023-09-22T23:21:38.396ZINFOcrucible-pantry: listen IP: 127.0.0.1:63903
238722023-09-22T23:21:38.398ZINFOcrucible-pantry (dropshot): request completed latency_us = 1405 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = 297166ba-ec88-4c99-9a90-f12675f9a285 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23873 Sep 22 23:21:38.398 INFO listening on 127.0.0.1:0, task: main
23874 Sep 22 23:21:38.399 INFO current number of open files limit 65536 is already the maximum
23875 Sep 22 23:21:38.399 INFO Opened existing region file "/tmp/downstairs-XotxiwRu/region.json"
23876 Sep 22 23:21:38.399 INFO Database read version 1
23877 Sep 22 23:21:38.399 DEBG Write :1004 deps:[] res:true
23878 Sep 22 23:21:38.399 INFO Database write version 1
23879 Sep 22 23:21:38.399 DEBG Write :1004 deps:[] res:true
23880 Sep 22 23:21:38.399 DEBG Write :1004 deps:[] res:true
238812023-09-22T23:21:38.401ZINFOcrucible-pantry (dropshot): request completed latency_us = 1440 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = 47e56d17-6c0f-4338-9a8d-1862a6e302ed response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23882 Sep 22 23:21:38.402 DEBG Write :1005 deps:[] res:true
23883 Sep 22 23:21:38.403 DEBG Write :1005 deps:[] res:true
23884 Sep 22 23:21:38.403 DEBG Write :1005 deps:[] res:true
238852023-09-22T23:21:38.406ZINFOcrucible-pantry (dropshot): request completed latency_us = 1404 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = d622fb87-a0dd-4ceb-80ff-9b2f1a9dfed6 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23886 Sep 22 23:21:38.406 DEBG Write :1006 deps:[] res:true
23887 Sep 22 23:21:38.406 DEBG Write :1006 deps:[] res:true
23888 Sep 22 23:21:38.407 DEBG Write :1006 deps:[] res:true
238892023-09-22T23:21:38.408ZINFOcrucible-pantry (dropshot): request completed latency_us = 1042 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = b1177d14-db18-4b39-9091-3cd7cce02134 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23890 Sep 22 23:21:38.409 DEBG Write :1007 deps:[] res:true
23891 Sep 22 23:21:38.409 DEBG Write :1007 deps:[] res:true
23892 Sep 22 23:21:38.409 DEBG Write :1007 deps:[] res:true
238932023-09-22T23:21:38.411ZINFOcrucible-pantry (dropshot): request completed latency_us = 986 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = 451fdf79-77d7-4af9-a4c4-3ade94f01481 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23894 Sep 22 23:21:38.411 DEBG Write :1008 deps:[] res:true
23895 Sep 22 23:21:38.412 DEBG Write :1008 deps:[] res:true
23896 Sep 22 23:21:38.412 DEBG Write :1008 deps:[] res:true
238972023-09-22T23:21:38.414ZINFOcrucible-pantry (dropshot): request completed latency_us = 1002 local_addr = 127.0.0.1:55883 method = POST remote_addr = 127.0.0.1:43786 req_id = 94442a37-08c7-4cc8-9eff-16d447b5f2c9 response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb/bulk_write
23898 Sep 22 23:21:38.414 DEBG Write :1009 deps:[] res:true
23899 Sep 22 23:21:38.414 DEBG Write :1009 deps:[] res:true
23900 Sep 22 23:21:38.415 DEBG Write :1009 deps:[] res:true
23901 Sep 22 23:21:38.415 INFO current number of open files limit 65536 is already the maximum
23902 Sep 22 23:21:38.415 INFO Created new region file "/tmp/downstairs-hwOOuWZz/region.json"
239032023-09-22T23:21:38.415ZINFOcrucible-pantry (datafile): detach removing entry for volume 17fca985-4bd4-4289-96a4-634e1d483fcb
239042023-09-22T23:21:38.415ZINFOcrucible-pantry (datafile): detaching volume 17fca985-4bd4-4289-96a4-634e1d483fcb
23905 Sep 22 23:21:38.418 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23906 Sep 22 23:21:38.419 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23907 Sep 22 23:21:38.419 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
239082023-09-22T23:21:38.419ZINFOcrucible-pantry (datafile): Request to deactivate this guest
239092023-09-22T23:21:38.419ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f set deactivating.
239102023-09-22T23:21:38.419ZINFOcrucible-pantry (dropshot): request completed latency_us = 4147 local_addr = 127.0.0.1:55883 method = DELETE remote_addr = 127.0.0.1:43786 req_id = 35cb97e1-bffa-4a4f-98e1-d29d3d140c7d response_code = 204 uri = /crucible/pantry/0/volume/17fca985-4bd4-4289-96a4-634e1d483fcb
23911 Sep 22 23:21:38.420 INFO Upstairs starts
23912 Sep 22 23:21:38.420 INFO Crucible Version: BuildInfo {
23913 version: "0.0.1",
23914 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
23915 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
23916 git_branch: "main",
23917 rustc_semver: "1.70.0",
23918 rustc_channel: "stable",
23919 rustc_host_triple: "x86_64-unknown-illumos",
23920 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
23921 cargo_triple: "x86_64-unknown-illumos",
23922 debug: true,
23923 opt_level: 0,
23924 }
23925 Sep 22 23:21:38.420 INFO Upstairs <-> Downstairs Message Version: 4
23926 Sep 22 23:21:38.420 INFO Crucible stats registered with UUID: e5b4ada7-7239-4d69-8861-23d72de6320f
23927 Sep 22 23:21:38.420 INFO Crucible e5b4ada7-7239-4d69-8861-23d72de6320f has session id: 592327dc-1ee1-4d46-9c50-69a4560944c7
23928 Sep 22 23:21:38.420 INFO [0] connecting to 127.0.0.1:33527, looper: 0
23929 Sep 22 23:21:38.420 INFO [1] connecting to 127.0.0.1:59188, looper: 1
23930 Sep 22 23:21:38.420 INFO [2] connecting to 127.0.0.1:54900, looper: 2
23931 Sep 22 23:21:38.420 INFO up_listen starts, task: up_listen
23932 Sep 22 23:21:38.420 INFO Wait for all three downstairs to come online
23933 Sep 22 23:21:38.421 INFO Flush timeout: 0.5
23934 Sep 22 23:21:38.421 INFO accepted connection from 127.0.0.1:61046, task: main
23935 Sep 22 23:21:38.421 INFO accepted connection from 127.0.0.1:52775, task: main
23936 Sep 22 23:21:38.421 INFO accepted connection from 127.0.0.1:55794, task: main
23937 Sep 22 23:21:38.421 INFO [0] e5b4ada7-7239-4d69-8861-23d72de6320f looper connected, looper: 0
23938 Sep 22 23:21:38.421 INFO [0] Proc runs for 127.0.0.1:33527 in state New
23939 Sep 22 23:21:38.421 INFO [1] e5b4ada7-7239-4d69-8861-23d72de6320f looper connected, looper: 1
23940 Sep 22 23:21:38.421 INFO [1] Proc runs for 127.0.0.1:59188 in state New
23941 Sep 22 23:21:38.421 INFO [2] e5b4ada7-7239-4d69-8861-23d72de6320f looper connected, looper: 2
23942 Sep 22 23:21:38.421 INFO [2] Proc runs for 127.0.0.1:54900 in state New
23943 Sep 22 23:21:38.422 INFO Connection request from e5b4ada7-7239-4d69-8861-23d72de6320f with version 4, task: proc
23944 Sep 22 23:21:38.422 INFO upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } connected, version 4, task: proc
23945 Sep 22 23:21:38.422 INFO Connection request from e5b4ada7-7239-4d69-8861-23d72de6320f with version 4, task: proc
23946 Sep 22 23:21:38.422 INFO upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } connected, version 4, task: proc
23947 Sep 22 23:21:38.422 INFO Connection request from e5b4ada7-7239-4d69-8861-23d72de6320f with version 4, task: proc
23948 Sep 22 23:21:38.422 INFO upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } connected, version 4, task: proc
23949 Sep 22 23:21:38.422 INFO [0] e5b4ada7-7239-4d69-8861-23d72de6320f (d15f82d8-86d1-4a0a-9592-c9a50e119254) New New New ds_transition to WaitActive
23950 Sep 22 23:21:38.422 INFO [0] Transition from New to WaitActive
23951 Sep 22 23:21:38.422 INFO [1] e5b4ada7-7239-4d69-8861-23d72de6320f (d15f82d8-86d1-4a0a-9592-c9a50e119254) WaitActive New New ds_transition to WaitActive
23952 Sep 22 23:21:38.422 INFO [1] Transition from New to WaitActive
23953 Sep 22 23:21:38.422 INFO [2] e5b4ada7-7239-4d69-8861-23d72de6320f (d15f82d8-86d1-4a0a-9592-c9a50e119254) WaitActive WaitActive New ds_transition to WaitActive
23954 Sep 22 23:21:38.422 INFO [2] Transition from New to WaitActive
23955 The guest has requested activation
23956 Sep 22 23:21:38.423 INFO e5b4ada7-7239-4d69-8861-23d72de6320f active request set
23957 Sep 22 23:21:38.423 INFO [0] received activate with gen 2
23958 Sep 22 23:21:38.423 INFO [0] client got ds_active_rx, promote! session d15f82d8-86d1-4a0a-9592-c9a50e119254
23959 Sep 22 23:21:38.423 INFO [1] received activate with gen 2
23960 Sep 22 23:21:38.423 INFO [1] client got ds_active_rx, promote! session d15f82d8-86d1-4a0a-9592-c9a50e119254
23961 Sep 22 23:21:38.423 INFO [2] received activate with gen 2
23962 Sep 22 23:21:38.423 INFO [2] client got ds_active_rx, promote! session d15f82d8-86d1-4a0a-9592-c9a50e119254
23963 Sep 22 23:21:38.423 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } to UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 }
23964 Sep 22 23:21:38.423 WARN Signaling to UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } thread that UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } is being promoted (read-write)
23965 Sep 22 23:21:38.423 INFO UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } is now active (read-write)
23966 Sep 22 23:21:38.423 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } to UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 }
23967 Sep 22 23:21:38.423 WARN Signaling to UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } thread that UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } is being promoted (read-write)
23968 Sep 22 23:21:38.423 INFO UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } is now active (read-write)
23969 Sep 22 23:21:38.423 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } to UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 }
23970 Sep 22 23:21:38.423 WARN Signaling to UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 } thread that UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } is being promoted (read-write)
23971 Sep 22 23:21:38.424 INFO UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } is now active (read-write)
23972 Sep 22 23:21:38.424 WARN Another upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 }, task: main
23973 Sep 22 23:21:38.424 INFO connection (127.0.0.1:55430): all done
23974 Sep 22 23:21:38.424 WARN Another upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 }, task: main
23975 Sep 22 23:21:38.424 INFO connection (127.0.0.1:50291): all done
23976 Sep 22 23:21:38.424 WARN Another upstairs UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: d15f82d8-86d1-4a0a-9592-c9a50e119254, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: e5b4ada7-7239-4d69-8861-23d72de6320f, session_id: 22467938-6385-426f-86c0-fa56c94d98b1, gen: 1 }, task: main
23977 Sep 22 23:21:38.424 INFO connection (127.0.0.1:55513): all done
239782023-09-22T23:21:38.424ZERROcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) cmd_loop saw YouAreNoLongerActive e5b4ada7-7239-4d69-8861-23d72de6320f d15f82d8-86d1-4a0a-9592-c9a50e119254 2
239792023-09-22T23:21:38.424ZINFOcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) Active Active Active ds_transition to Disabled
239802023-09-22T23:21:38.424ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
239812023-09-22T23:21:38.424ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f set inactive, session 22467938-6385-426f-86c0-fa56c94d98b1
239822023-09-22T23:21:38.424ZERROcrucible-pantry (datafile): 127.0.0.1:59188: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 1
239832023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f Gone missing, transition from Disabled to Disconnected
239842023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [1] e5b4ada7-7239-4d69-8861-23d72de6320f connection to 127.0.0.1:59188 closed looper = 1
239852023-09-22T23:21:38.425ZERROcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) cmd_loop saw YouAreNoLongerActive e5b4ada7-7239-4d69-8861-23d72de6320f d15f82d8-86d1-4a0a-9592-c9a50e119254 2
239862023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) Active Disconnected Active ds_transition to Disabled
239872023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
239882023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f set inactive, session 22467938-6385-426f-86c0-fa56c94d98b1
239892023-09-22T23:21:38.425ZERROcrucible-pantry (datafile): 127.0.0.1:54900: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 2
239902023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f Gone missing, transition from Disabled to Disconnected
239912023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [2] e5b4ada7-7239-4d69-8861-23d72de6320f connection to 127.0.0.1:54900 closed looper = 2
239922023-09-22T23:21:38.425ZERROcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) cmd_loop saw YouAreNoLongerActive e5b4ada7-7239-4d69-8861-23d72de6320f d15f82d8-86d1-4a0a-9592-c9a50e119254 2
239932023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f (22467938-6385-426f-86c0-fa56c94d98b1) Active Disconnected Disconnected ds_transition to Disabled
239942023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
239952023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f set inactive, session 22467938-6385-426f-86c0-fa56c94d98b1
239962023-09-22T23:21:38.425ZERROcrucible-pantry (datafile): 127.0.0.1:33527: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 0
239972023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f Gone missing, transition from Disabled to Disconnected
239982023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [0] e5b4ada7-7239-4d69-8861-23d72de6320f connection to 127.0.0.1:33527 closed looper = 0
239992023-09-22T23:21:38.425ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
24000 Sep 22 23:21:38.425 INFO [0] downstairs client at 127.0.0.1:33527 has UUID c25278bf-5b9e-4380-abf6-dfd9701dce27
240012023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:59188 task reports connection:false
24002 Sep 22 23:21:38.425 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c25278bf-5b9e-4380-abf6-dfd9701dce27, encrypted: true, database_read_version: 1, database_write_version: 1 }
240032023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f Disconnected Disconnected Disconnected
24004 Sep 22 23:21:38.425 INFO e5b4ada7-7239-4d69-8861-23d72de6320f WaitActive WaitActive WaitActive
240052023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:59188 task reports offline
240062023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:54900 task reports connection:false
24007 Sep 22 23:21:38.425 INFO [1] downstairs client at 127.0.0.1:59188 has UUID 99808fa8-ed0d-4880-abf3-c77623f8cd97
240082023-09-22T23:21:38.425ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f Disconnected Disconnected Disconnected
24009 Sep 22 23:21:38.425 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 99808fa8-ed0d-4880-abf3-c77623f8cd97, encrypted: true, database_read_version: 1, database_write_version: 1 }
240102023-09-22T23:21:38.426ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:54900 task reports offline
24011 Sep 22 23:21:38.426 INFO e5b4ada7-7239-4d69-8861-23d72de6320f WaitActive WaitActive WaitActive
240122023-09-22T23:21:38.426ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:33527 task reports connection:false
240132023-09-22T23:21:38.426ZINFOcrucible-pantry (datafile): e5b4ada7-7239-4d69-8861-23d72de6320f Disconnected Disconnected Disconnected
240142023-09-22T23:21:38.426ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:33527 task reports offline
24015 Sep 22 23:21:38.426 INFO [2] downstairs client at 127.0.0.1:54900 has UUID 651c2c7a-b2ed-48c3-b57d-44ab91a6f440
24016 Sep 22 23:21:38.426 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 651c2c7a-b2ed-48c3-b57d-44ab91a6f440, encrypted: true, database_read_version: 1, database_write_version: 1 }
240172023-09-22T23:21:38.426ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
24018 Sep 22 23:21:38.426 INFO e5b4ada7-7239-4d69-8861-23d72de6320f WaitActive WaitActive WaitActive
240192023-09-22T23:21:38.426ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
24020 Sep 22 23:21:38.426 INFO Current flush_numbers [0..12]: [1, 1]
24021 Sep 22 23:21:38.426 INFO Downstairs has completed Negotiation, task: proc
24022 Sep 22 23:21:38.427 INFO Current flush_numbers [0..12]: [1, 1]
24023 Sep 22 23:21:38.427 INFO Downstairs has completed Negotiation, task: proc
24024 Sep 22 23:21:38.427 INFO Current flush_numbers [0..12]: [1, 1]
24025 Sep 22 23:21:38.427 INFO Downstairs has completed Negotiation, task: proc
24026 Sep 22 23:21:38.428 INFO [0] e5b4ada7-7239-4d69-8861-23d72de6320f (d15f82d8-86d1-4a0a-9592-c9a50e119254) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
24027 Sep 22 23:21:38.428 INFO [0] Transition from WaitActive to WaitQuorum
24028 Sep 22 23:21:38.428 WARN [0] new RM replaced this: None
24029 Sep 22 23:21:38.428 INFO [0] Starts reconcile loop
24030 Sep 22 23:21:38.428 INFO [1] e5b4ada7-7239-4d69-8861-23d72de6320f (d15f82d8-86d1-4a0a-9592-c9a50e119254) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
24031 Sep 22 23:21:38.428 INFO [1] Transition from WaitActive to WaitQuorum
24032 Sep 22 23:21:38.428 WARN [1] new RM replaced this: None
24033 Sep 22 23:21:38.428 INFO [1] Starts reconcile loop
24034 Sep 22 23:21:38.428 INFO [2] e5b4ada7-7239-4d69-8861-23d72de6320f (d15f82d8-86d1-4a0a-9592-c9a50e119254) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
24035 Sep 22 23:21:38.428 INFO [2] Transition from WaitActive to WaitQuorum
24036 Sep 22 23:21:38.428 WARN [2] new RM replaced this: None
24037 Sep 22 23:21:38.428 INFO [2] Starts reconcile loop
24038 Sep 22 23:21:38.428 INFO [0] 127.0.0.1:33527 task reports connection:true
24039 Sep 22 23:21:38.428 INFO e5b4ada7-7239-4d69-8861-23d72de6320f WaitQuorum WaitQuorum WaitQuorum
24040 Sep 22 23:21:38.428 INFO [0]R flush_numbers: [1, 1]
24041 Sep 22 23:21:38.428 INFO [0]R generation: [1, 1]
24042 Sep 22 23:21:38.428 INFO [0]R dirty: [false, false]
24043 Sep 22 23:21:38.428 INFO [1]R flush_numbers: [1, 1]
24044 Sep 22 23:21:38.428 INFO [1]R generation: [1, 1]
24045 Sep 22 23:21:38.428 INFO [1]R dirty: [false, false]
24046 Sep 22 23:21:38.428 INFO [2]R flush_numbers: [1, 1]
24047 Sep 22 23:21:38.428 INFO [2]R generation: [1, 1]
24048 Sep 22 23:21:38.428 INFO [2]R dirty: [false, false]
24049 Sep 22 23:21:38.428 INFO Max found gen is 2
24050 Sep 22 23:21:38.428 INFO Generation requested: 2 >= found:2
24051 Sep 22 23:21:38.428 INFO Next flush: 2
24052 Sep 22 23:21:38.428 INFO All extents match
24053 Sep 22 23:21:38.428 INFO No downstairs repair required
24054 Sep 22 23:21:38.428 INFO No initial repair work was required
24055 Sep 22 23:21:38.428 INFO Set Downstairs and Upstairs active
24056 Sep 22 23:21:38.428 INFO e5b4ada7-7239-4d69-8861-23d72de6320f is now active with session: d15f82d8-86d1-4a0a-9592-c9a50e119254
24057 Sep 22 23:21:38.428 INFO e5b4ada7-7239-4d69-8861-23d72de6320f Set Active after no repair
24058 Sep 22 23:21:38.428 INFO Notify all downstairs, region set compare is done.
24059 Sep 22 23:21:38.429 INFO Set check for repair
24060 Sep 22 23:21:38.429 INFO [1] 127.0.0.1:59188 task reports connection:true
24061 Sep 22 23:21:38.429 INFO e5b4ada7-7239-4d69-8861-23d72de6320f Active Active Active
24062 Sep 22 23:21:38.429 INFO Set check for repair
24063 Sep 22 23:21:38.429 INFO [2] 127.0.0.1:54900 task reports connection:true
24064 Sep 22 23:21:38.429 INFO e5b4ada7-7239-4d69-8861-23d72de6320f Active Active Active
24065 Sep 22 23:21:38.429 INFO Set check for repair
24066 Sep 22 23:21:38.429 INFO [0] received reconcile message
24067 Sep 22 23:21:38.429 INFO [0] All repairs completed, exit
24068 Sep 22 23:21:38.429 INFO [0] Starts cmd_loop
24069 Sep 22 23:21:38.429 INFO [1] received reconcile message
24070 Sep 22 23:21:38.429 INFO [1] All repairs completed, exit
24071 Sep 22 23:21:38.429 INFO [1] Starts cmd_loop
24072 Sep 22 23:21:38.429 INFO [2] received reconcile message
24073 Sep 22 23:21:38.429 INFO [2] All repairs completed, exit
24074 Sep 22 23:21:38.429 INFO [2] Starts cmd_loop
24075 The guest has finished waiting for activation
24076 Sep 22 23:21:38.430 DEBG IO Read 1000 has deps []
24077 Sep 22 23:21:38.431 DEBG Read :1000 deps:[] res:true
24078 Sep 22 23:21:38.431 DEBG Read :1000 deps:[] res:true
24079 Sep 22 23:21:38.432 DEBG Read :1000 deps:[] res:true
24080 Sep 22 23:21:38.436 DEBG [0] Read AckReady 1000, : downstairs
24081 Sep 22 23:21:38.438 DEBG [1] Read already AckReady 1000, : downstairs
24082 Sep 22 23:21:38.441 DEBG [2] Read already AckReady 1000, : downstairs
24083 Sep 22 23:21:38.441 DEBG up_ds_listen was notified
24084 Sep 22 23:21:38.441 DEBG up_ds_listen process 1000
24085 Sep 22 23:21:38.441 DEBG [A] ack job 1000:1, : downstairs
24086 Sep 22 23:21:38.441 DEBG up_ds_listen checked 1 jobs, back to waiting
24087 test test::test_pantry_bulk_write ... ok
24088 Sep 22 23:21:38.448 INFO current number of open files limit 65536 is already the maximum
24089 Sep 22 23:21:38.448 INFO Created new region file "/tmp/downstairs-rDPgnXdu/region.json"
24090 Sep 22 23:21:38.461 DEBG IO Flush 1001 has deps [JobId(1000)]
24091 Sep 22 23:21:38.464 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
24092 Sep 22 23:21:38.464 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
24093 Sep 22 23:21:38.464 DEBG up_ds_listen was notified
24094 Sep 22 23:21:38.464 DEBG up_ds_listen process 1001
24095 Sep 22 23:21:38.464 DEBG [A] ack job 1001:2, : downstairs
24096 Sep 22 23:21:38.464 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
24097 Sep 22 23:21:38.464 DEBG up_ds_listen checked 1 jobs, back to waiting
24098 Sep 22 23:21:38.483 WARN upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } disconnected, 0 jobs left, task: main
24099 Sep 22 23:21:38.483 WARN upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } was previously active, clearing, task: main
24100 Sep 22 23:21:38.484 INFO connection (127.0.0.1:46576): all done
24101 Sep 22 23:21:38.484 WARN upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } disconnected, 0 jobs left, task: main
24102 Sep 22 23:21:38.484 WARN upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } was previously active, clearing, task: main
24103 Sep 22 23:21:38.484 INFO connection (127.0.0.1:61396): all done
24104 Sep 22 23:21:38.484 WARN upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } disconnected, 0 jobs left, task: main
24105 Sep 22 23:21:38.484 WARN upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 02fbce88-f31f-4b2e-a8ed-d7530615b97b, gen: 1 } was previously active, clearing, task: main
24106 Sep 22 23:21:38.484 INFO connection (127.0.0.1:56109): all done
241072023-09-22T23:21:38.484ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63903 remote_addr = 127.0.0.1:56972
241082023-09-22T23:21:38.485ZINFOcrucible-pantry (datafile): no entry exists for volume 6a15a37b-9c1f-4533-aac2-9a0f3a29490f, constructing...
241092023-09-22T23:21:38.485ZINFOcrucible-pantry (datafile): Upstairs starts
241102023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
241112023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
241122023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: f5cf38e1-07a4-4403-b783-c67b3e5d3b37
241132023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): Crucible f5cf38e1-07a4-4403-b783-c67b3e5d3b37 has session id: 30a44f31-a40d-49e5-9fd4-5bd01d26e4ac
241142023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:65363 looper = 0
241152023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:63095 looper = 1
241162023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:64907 looper = 2
241172023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
241182023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
241192023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
241202023-09-22T23:21:38.486ZINFOcrucible-pantry (datafile): volume 6a15a37b-9c1f-4533-aac2-9a0f3a29490f constructed ok
24121 The guest has requested activation
241222023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 active request set
24123 Sep 22 23:21:38.487 INFO accepted connection from 127.0.0.1:50295, task: main
24124 Sep 22 23:21:38.487 INFO accepted connection from 127.0.0.1:60639, task: main
24125 Sep 22 23:21:38.487 INFO accepted connection from 127.0.0.1:55335, task: main
241262023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected looper = 0
241272023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:65363 in state New
241282023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected looper = 1
241292023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:63095 in state New
241302023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected looper = 2
24131 Sep 22 23:21:38.487 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
241322023-09-22T23:21:38.487ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:64907 in state New
24133 Sep 22 23:21:38.487 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } connected, version 4, task: proc
24134 Sep 22 23:21:38.488 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
24135 Sep 22 23:21:38.488 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } connected, version 4, task: proc
24136 Sep 22 23:21:38.488 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
24137 Sep 22 23:21:38.488 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } connected, version 4, task: proc
241382023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) New New New ds_transition to WaitActive
241392023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
241402023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
241412023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) WaitActive New New ds_transition to WaitActive
241422023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
241432023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
241442023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) WaitActive WaitActive New ds_transition to WaitActive
241452023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
24146 Sep 22 23:21:38.488 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } is now active (read-write)
241472023-09-22T23:21:38.488ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
24148 Sep 22 23:21:38.488 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } is now active (read-write)
24149 Sep 22 23:21:38.488 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } is now active (read-write)
241502023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:65363 has UUID ad667607-b05e-4427-9709-433732d1d5ab
241512023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ad667607-b05e-4427-9709-433732d1d5ab, encrypted: true, database_read_version: 1, database_write_version: 1 }
241522023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
241532023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:63095 has UUID 995b5b97-67dd-43f0-b20a-d5b2c0c00924
241542023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 995b5b97-67dd-43f0-b20a-d5b2c0c00924, encrypted: true, database_read_version: 1, database_write_version: 1 }
241552023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
241562023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:64907 has UUID f0f4ef77-64c9-4cae-ad93-ac2fb4943d50
241572023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f0f4ef77-64c9-4cae-ad93-ac2fb4943d50, encrypted: true, database_read_version: 1, database_write_version: 1 }
241582023-09-22T23:21:38.489ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
24159 Sep 22 23:21:38.489 INFO Current flush_numbers [0..12]: [0, 0]
24160 Sep 22 23:21:38.490 INFO Downstairs has completed Negotiation, task: proc
24161 Sep 22 23:21:38.490 INFO Current flush_numbers [0..12]: [0, 0]
24162 Sep 22 23:21:38.490 INFO Downstairs has completed Negotiation, task: proc
24163 Sep 22 23:21:38.491 INFO Current flush_numbers [0..12]: [0, 0]
24164 Sep 22 23:21:38.491 INFO Downstairs has completed Negotiation, task: proc
241652023-09-22T23:21:38.491ZINFOcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
241662023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
241672023-09-22T23:21:38.492ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
241682023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
241692023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
241702023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
241712023-09-22T23:21:38.492ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
241722023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
241732023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
241742023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
241752023-09-22T23:21:38.492ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
241762023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
241772023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:65363 task reports connection:true
241782023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitQuorum WaitQuorum WaitQuorum
241792023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
241802023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
241812023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
241822023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
241832023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
24184 The guest has finished waiting for activation
241852023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
241862023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
241872023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
241882023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
241892023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Max found gen is 1
241902023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
241912023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Next flush: 1
241922023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): All extents match
241932023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): No downstairs repair required
241942023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): No initial repair work was required
241952023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
241962023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 is now active with session: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
241972023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Set Active after no repair
241982023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
241992023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Set check for repair
242002023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:63095 task reports connection:true
242012023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Active Active Active
242022023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Set check for repair
242032023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:64907 task reports connection:true
242042023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Active Active Active
242052023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): Set check for repair
242062023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0] received reconcile message
242072023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
242082023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
242092023-09-22T23:21:38.492ZINFOcrucible-pantry (datafile): [1] received reconcile message
242102023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
242112023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
242122023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): [2] received reconcile message
242132023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
242142023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
242152023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): volume 6a15a37b-9c1f-4533-aac2-9a0f3a29490f activated ok
242162023-09-22T23:21:38.493ZINFOcrucible-pantry (datafile): volume 6a15a37b-9c1f-4533-aac2-9a0f3a29490f constructed and inserted ok
242172023-09-22T23:21:38.493ZINFOcrucible-pantry (dropshot): request completed latency_us = 7602 local_addr = 127.0.0.1:63903 method = POST remote_addr = 127.0.0.1:56972 req_id = a451b240-4668-432f-bef0-04505eeedc8a response_code = 200 uri = /crucible/pantry/0/volume/6a15a37b-9c1f-4533-aac2-9a0f3a29490f
242182023-09-22T23:21:38.494ZINFOcrucible-pantry (dropshot): request completed latency_us = 414 local_addr = 127.0.0.1:63903 method = POST remote_addr = 127.0.0.1:56972 req_id = 3abc8a05-6de8-4254-99e3-103189fc2dd1 response_code = 200 uri = /crucible/pantry/0/volume/6a15a37b-9c1f-4533-aac2-9a0f3a29490f/import_from_url
24219 Sep 22 23:21:38.507 INFO current number of open files limit 65536 is already the maximum
24220 Sep 22 23:21:38.507 INFO Opened existing region file "/tmp/downstairs-sxN8iMqh/region.json"
24221 Sep 22 23:21:38.507 INFO Database read version 1
24222 Sep 22 23:21:38.507 INFO Database write version 1
24223 Sep 22 23:21:38.553 INFO current number of open files limit 65536 is already the maximum
24224 Sep 22 23:21:38.553 INFO Opened existing region file "/tmp/downstairs-9KQd6X6V/region.json"
24225 Sep 22 23:21:38.553 INFO Database read version 1
24226 Sep 22 23:21:38.553 INFO Database write version 1
24227 Sep 22 23:21:38.584 DEBG Write :1000 deps:[] res:true
24228 Sep 22 23:21:38.585 DEBG Write :1000 deps:[] res:true
24229 Sep 22 23:21:38.586 DEBG Write :1000 deps:[] res:true
24230 Sep 22 23:21:38.588 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
24231 Sep 22 23:21:38.588 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
24232 Sep 22 23:21:38.588 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
242332023-09-22T23:21:38.591ZINFOcrucible-pantry (dropshot): request completed latency_us = 12975 local_addr = 127.0.0.1:63903 method = GET remote_addr = 127.0.0.1:56972 req_id = 7bb2c307-3a70-4afb-9141-62fda82d9cfa response_code = 200 uri = /crucible/pantry/0/job/1df59235-070a-40ea-abd7-4364f8871e04/ok
242342023-09-22T23:21:38.591ZINFOcrucible-pantry (datafile): detach removing entry for volume 6a15a37b-9c1f-4533-aac2-9a0f3a29490f
242352023-09-22T23:21:38.591ZINFOcrucible-pantry (datafile): detaching volume 6a15a37b-9c1f-4533-aac2-9a0f3a29490f
24236 Sep 22 23:21:38.592 DEBG Flush :1002 extent_limit None deps:[] res:true f:2 g:1
24237 Sep 22 23:21:38.592 DEBG Flush :1002 extent_limit None deps:[] res:true f:2 g:1
24238 Sep 22 23:21:38.592 DEBG Flush :1002 extent_limit None deps:[] res:true f:2 g:1
242392023-09-22T23:21:38.593ZINFOcrucible-pantry (datafile): Request to deactivate this guest
242402023-09-22T23:21:38.593ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 set deactivating.
242412023-09-22T23:21:38.593ZINFOcrucible-pantry (dropshot): request completed latency_us = 1852 local_addr = 127.0.0.1:63903 method = DELETE remote_addr = 127.0.0.1:56972 req_id = b7960ba4-c817-4103-b2b5-de050300c968 response_code = 204 uri = /crucible/pantry/0/volume/6a15a37b-9c1f-4533-aac2-9a0f3a29490f
24242 Sep 22 23:21:38.593 INFO Upstairs starts
24243 Sep 22 23:21:38.594 INFO Crucible Version: BuildInfo {
24244 version: "0.0.1",
24245 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
24246 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
24247 git_branch: "main",
24248 rustc_semver: "1.70.0",
24249 rustc_channel: "stable",
24250 rustc_host_triple: "x86_64-unknown-illumos",
24251 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
24252 cargo_triple: "x86_64-unknown-illumos",
24253 debug: true,
24254 opt_level: 0,
24255 }
24256 Sep 22 23:21:38.594 INFO Upstairs <-> Downstairs Message Version: 4
24257 Sep 22 23:21:38.594 INFO Crucible stats registered with UUID: f5cf38e1-07a4-4403-b783-c67b3e5d3b37
24258 Sep 22 23:21:38.594 INFO Crucible f5cf38e1-07a4-4403-b783-c67b3e5d3b37 has session id: 22525190-99f3-4342-b603-249740a74d31
24259 Sep 22 23:21:38.594 INFO [0] connecting to 127.0.0.1:65363, looper: 0
24260 Sep 22 23:21:38.594 INFO [1] connecting to 127.0.0.1:63095, looper: 1
24261 Sep 22 23:21:38.594 INFO [2] connecting to 127.0.0.1:64907, looper: 2
24262 Sep 22 23:21:38.594 INFO up_listen starts, task: up_listen
24263 Sep 22 23:21:38.594 INFO Wait for all three downstairs to come online
24264 Sep 22 23:21:38.594 INFO Flush timeout: 0.5
24265 Sep 22 23:21:38.594 INFO accepted connection from 127.0.0.1:51063, task: main
24266 Sep 22 23:21:38.594 INFO accepted connection from 127.0.0.1:35409, task: main
24267 Sep 22 23:21:38.594 INFO accepted connection from 127.0.0.1:47713, task: main
24268 Sep 22 23:21:38.595 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected, looper: 0
24269 Sep 22 23:21:38.595 INFO [0] Proc runs for 127.0.0.1:65363 in state New
24270 Sep 22 23:21:38.595 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected, looper: 1
24271 Sep 22 23:21:38.595 INFO [1] Proc runs for 127.0.0.1:63095 in state New
24272 Sep 22 23:21:38.595 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 looper connected, looper: 2
24273 Sep 22 23:21:38.595 INFO [2] Proc runs for 127.0.0.1:64907 in state New
24274 Sep 22 23:21:38.595 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
24275 Sep 22 23:21:38.595 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } connected, version 4, task: proc
24276 Sep 22 23:21:38.595 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
24277 Sep 22 23:21:38.595 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } connected, version 4, task: proc
24278 Sep 22 23:21:38.595 INFO Connection request from f5cf38e1-07a4-4403-b783-c67b3e5d3b37 with version 4, task: proc
24279 Sep 22 23:21:38.595 INFO upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } connected, version 4, task: proc
24280 Sep 22 23:21:38.595 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (e5eef348-dcf9-4634-aa8e-08126b692f43) New New New ds_transition to WaitActive
24281 Sep 22 23:21:38.595 INFO [0] Transition from New to WaitActive
24282 Sep 22 23:21:38.595 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (e5eef348-dcf9-4634-aa8e-08126b692f43) WaitActive New New ds_transition to WaitActive
24283 Sep 22 23:21:38.595 INFO [1] Transition from New to WaitActive
24284 Sep 22 23:21:38.596 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (e5eef348-dcf9-4634-aa8e-08126b692f43) WaitActive WaitActive New ds_transition to WaitActive
24285 Sep 22 23:21:38.596 INFO [2] Transition from New to WaitActive
24286 The guest has requested activation
24287 Sep 22 23:21:38.596 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 active request set
24288 Sep 22 23:21:38.596 INFO [0] received activate with gen 3
24289 Sep 22 23:21:38.596 INFO [0] client got ds_active_rx, promote! session e5eef348-dcf9-4634-aa8e-08126b692f43
24290 Sep 22 23:21:38.596 INFO [1] received activate with gen 3
24291 Sep 22 23:21:38.596 INFO [1] client got ds_active_rx, promote! session e5eef348-dcf9-4634-aa8e-08126b692f43
24292 Sep 22 23:21:38.596 INFO [2] received activate with gen 3
24293 Sep 22 23:21:38.596 INFO [2] client got ds_active_rx, promote! session e5eef348-dcf9-4634-aa8e-08126b692f43
24294 Sep 22 23:21:38.596 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } to UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 }
24295 Sep 22 23:21:38.596 WARN Signaling to UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } thread that UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } is being promoted (read-write)
24296 Sep 22 23:21:38.596 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } is now active (read-write)
24297 Sep 22 23:21:38.596 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } to UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 }
24298 Sep 22 23:21:38.596 WARN Signaling to UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } thread that UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } is being promoted (read-write)
24299 Sep 22 23:21:38.596 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } is now active (read-write)
24300 Sep 22 23:21:38.596 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } to UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 }
24301 Sep 22 23:21:38.596 WARN Signaling to UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 } thread that UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } is being promoted (read-write)
24302 Sep 22 23:21:38.596 INFO UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } is now active (read-write)
24303 Sep 22 23:21:38.596 WARN Another upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 }, task: main
24304 Sep 22 23:21:38.596 INFO connection (127.0.0.1:50295): all done
24305 Sep 22 23:21:38.596 WARN Another upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 }, task: main
24306 Sep 22 23:21:38.597 INFO connection (127.0.0.1:60639): all done
24307 Sep 22 23:21:38.597 WARN Another upstairs UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: e5eef348-dcf9-4634-aa8e-08126b692f43, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: f5cf38e1-07a4-4403-b783-c67b3e5d3b37, session_id: 9ca4c47d-0d69-4204-a6cd-e8c5971753b4, gen: 1 }, task: main
24308 Sep 22 23:21:38.597 INFO connection (127.0.0.1:55335): all done
243092023-09-22T23:21:38.597ZERROcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) cmd_loop saw YouAreNoLongerActive f5cf38e1-07a4-4403-b783-c67b3e5d3b37 e5eef348-dcf9-4634-aa8e-08126b692f43 3
243102023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) Active Active Active ds_transition to Disabled
243112023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
243122023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 set inactive, session 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
243132023-09-22T23:21:38.597ZERROcrucible-pantry (datafile): 127.0.0.1:65363: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 1 looper = 0
243142023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Gone missing, transition from Disabled to Disconnected
243152023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 connection to 127.0.0.1:65363 closed looper = 0
243162023-09-22T23:21:38.597ZERROcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) cmd_loop saw YouAreNoLongerActive f5cf38e1-07a4-4403-b783-c67b3e5d3b37 e5eef348-dcf9-4634-aa8e-08126b692f43 3
243172023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) Disconnected Active Active ds_transition to Disabled
243182023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
243192023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 set inactive, session 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
243202023-09-22T23:21:38.597ZERROcrucible-pantry (datafile): 127.0.0.1:63095: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 1 looper = 1
243212023-09-22T23:21:38.597ZINFOcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Gone missing, transition from Disabled to Disconnected
243222023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 connection to 127.0.0.1:63095 closed looper = 1
243232023-09-22T23:21:38.598ZERROcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) cmd_loop saw YouAreNoLongerActive f5cf38e1-07a4-4403-b783-c67b3e5d3b37 e5eef348-dcf9-4634-aa8e-08126b692f43 3
243242023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (9ca4c47d-0d69-4204-a6cd-e8c5971753b4) Disconnected Disconnected Active ds_transition to Disabled
243252023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
243262023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 set inactive, session 9ca4c47d-0d69-4204-a6cd-e8c5971753b4
24327 Sep 22 23:21:38.598 INFO [0] downstairs client at 127.0.0.1:65363 has UUID ad667607-b05e-4427-9709-433732d1d5ab
243282023-09-22T23:21:38.598ZERROcrucible-pantry (datafile): 127.0.0.1:64907: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 1 looper = 2
24329 Sep 22 23:21:38.598 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ad667607-b05e-4427-9709-433732d1d5ab, encrypted: true, database_read_version: 1, database_write_version: 1 }
243302023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Gone missing, transition from Disabled to Disconnected
24331 Sep 22 23:21:38.598 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
243322023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 connection to 127.0.0.1:64907 closed looper = 2
243332023-09-22T23:21:38.598ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
24334 Sep 22 23:21:38.598 INFO [1] downstairs client at 127.0.0.1:63095 has UUID 995b5b97-67dd-43f0-b20a-d5b2c0c00924
24335 Sep 22 23:21:38.598 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 995b5b97-67dd-43f0-b20a-d5b2c0c00924, encrypted: true, database_read_version: 1, database_write_version: 1 }
243362023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:65363 task reports connection:false
243372023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Disconnected Disconnected Disconnected
24338 Sep 22 23:21:38.598 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
243392023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:65363 task reports offline
24340 Sep 22 23:21:38.598 INFO [2] downstairs client at 127.0.0.1:64907 has UUID f0f4ef77-64c9-4cae-ad93-ac2fb4943d50
243412023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:63095 task reports connection:false
24342 Sep 22 23:21:38.598 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f0f4ef77-64c9-4cae-ad93-ac2fb4943d50, encrypted: true, database_read_version: 1, database_write_version: 1 }
243432023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Disconnected Disconnected Disconnected
24344 Sep 22 23:21:38.598 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitActive WaitActive WaitActive
243452023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:63095 task reports offline
243462023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:64907 task reports connection:false
243472023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Disconnected Disconnected Disconnected
243482023-09-22T23:21:38.598ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:64907 task reports offline
243492023-09-22T23:21:38.598ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
243502023-09-22T23:21:38.598ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
24351 Sep 22 23:21:38.598 INFO Current flush_numbers [0..12]: [1, 1]
24352 Sep 22 23:21:38.598 INFO Downstairs has completed Negotiation, task: proc
24353 Sep 22 23:21:38.599 INFO Current flush_numbers [0..12]: [1, 1]
24354 Sep 22 23:21:38.599 INFO Downstairs has completed Negotiation, task: proc
24355 Sep 22 23:21:38.599 INFO Current flush_numbers [0..12]: [1, 1]
24356 Sep 22 23:21:38.599 INFO Downstairs has completed Negotiation, task: proc
24357 Sep 22 23:21:38.599 INFO [0] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (e5eef348-dcf9-4634-aa8e-08126b692f43) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
24358 Sep 22 23:21:38.599 INFO [0] Transition from WaitActive to WaitQuorum
24359 Sep 22 23:21:38.599 WARN [0] new RM replaced this: None
24360 Sep 22 23:21:38.599 INFO [0] Starts reconcile loop
24361 Sep 22 23:21:38.599 INFO [1] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (e5eef348-dcf9-4634-aa8e-08126b692f43) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
24362 Sep 22 23:21:38.599 INFO [1] Transition from WaitActive to WaitQuorum
24363 Sep 22 23:21:38.599 WARN [1] new RM replaced this: None
24364 Sep 22 23:21:38.599 INFO [1] Starts reconcile loop
24365 Sep 22 23:21:38.599 INFO [2] f5cf38e1-07a4-4403-b783-c67b3e5d3b37 (e5eef348-dcf9-4634-aa8e-08126b692f43) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
24366 Sep 22 23:21:38.599 INFO [2] Transition from WaitActive to WaitQuorum
24367 Sep 22 23:21:38.599 WARN [2] new RM replaced this: None
24368 Sep 22 23:21:38.599 INFO [2] Starts reconcile loop
24369 Sep 22 23:21:38.599 INFO [0] 127.0.0.1:65363 task reports connection:true
24370 Sep 22 23:21:38.599 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 WaitQuorum WaitQuorum WaitQuorum
24371 Sep 22 23:21:38.599 INFO [0]R flush_numbers: [1, 1]
24372 Sep 22 23:21:38.599 INFO [0]R generation: [1, 1]
24373 Sep 22 23:21:38.599 INFO [0]R dirty: [false, false]
24374 Sep 22 23:21:38.599 INFO [1]R flush_numbers: [1, 1]
24375 Sep 22 23:21:38.599 INFO [1]R generation: [1, 1]
24376 Sep 22 23:21:38.599 INFO [1]R dirty: [false, false]
24377 Sep 22 23:21:38.599 INFO [2]R flush_numbers: [1, 1]
24378 Sep 22 23:21:38.599 INFO [2]R generation: [1, 1]
24379 Sep 22 23:21:38.599 INFO [2]R dirty: [false, false]
24380 Sep 22 23:21:38.599 INFO Max found gen is 2
24381 Sep 22 23:21:38.599 INFO Generation requested: 3 >= found:2
24382 Sep 22 23:21:38.599 INFO Next flush: 2
24383 Sep 22 23:21:38.600 INFO All extents match
24384 Sep 22 23:21:38.600 INFO No downstairs repair required
24385 Sep 22 23:21:38.600 INFO No initial repair work was required
24386 Sep 22 23:21:38.600 INFO Set Downstairs and Upstairs active
24387 Sep 22 23:21:38.600 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 is now active with session: e5eef348-dcf9-4634-aa8e-08126b692f43
24388 Sep 22 23:21:38.600 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Set Active after no repair
24389 Sep 22 23:21:38.600 INFO Notify all downstairs, region set compare is done.
24390 Sep 22 23:21:38.600 INFO Set check for repair
24391 Sep 22 23:21:38.600 INFO [1] 127.0.0.1:63095 task reports connection:true
24392 Sep 22 23:21:38.600 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Active Active Active
24393 Sep 22 23:21:38.600 INFO Set check for repair
24394 Sep 22 23:21:38.600 INFO [2] 127.0.0.1:64907 task reports connection:true
24395 Sep 22 23:21:38.600 INFO f5cf38e1-07a4-4403-b783-c67b3e5d3b37 Active Active Active
24396 Sep 22 23:21:38.600 INFO Set check for repair
24397 Sep 22 23:21:38.600 INFO [0] received reconcile message
24398 Sep 22 23:21:38.600 INFO [0] All repairs completed, exit
24399 Sep 22 23:21:38.600 INFO [0] Starts cmd_loop
24400 Sep 22 23:21:38.600 INFO [1] received reconcile message
24401 Sep 22 23:21:38.600 INFO [1] All repairs completed, exit
24402 Sep 22 23:21:38.600 INFO [1] Starts cmd_loop
24403 Sep 22 23:21:38.600 INFO [2] received reconcile message
24404 Sep 22 23:21:38.600 INFO [2] All repairs completed, exit
24405 Sep 22 23:21:38.600 INFO [2] Starts cmd_loop
24406 The guest has finished waiting for activation
24407 Sep 22 23:21:38.600 DEBG IO Read 1000 has deps []
24408 Sep 22 23:21:38.601 DEBG Read :1000 deps:[] res:true
24409 Sep 22 23:21:38.601 DEBG Read :1000 deps:[] res:true
24410 Sep 22 23:21:38.602 DEBG Read :1000 deps:[] res:true
24411 Sep 22 23:21:38.605 DEBG [0] Read AckReady 1000, : downstairs
24412 Sep 22 23:21:38.607 DEBG [1] Read already AckReady 1000, : downstairs
24413 Sep 22 23:21:38.609 DEBG [2] Read already AckReady 1000, : downstairs
24414 Sep 22 23:21:38.609 DEBG up_ds_listen was notified
24415 Sep 22 23:21:38.609 DEBG up_ds_listen process 1000
24416 Sep 22 23:21:38.609 DEBG [A] ack job 1000:1, : downstairs
24417 Sep 22 23:21:38.610 DEBG up_ds_listen checked 1 jobs, back to waiting
24418 test test::test_pantry_import_from_local_server ... ok
24419 Sep 22 23:21:38.622 INFO current number of open files limit 65536 is already the maximum
24420 Sep 22 23:21:38.622 INFO Created new region file "/tmp/downstairs-wuCOh34x/region.json"
24421 Sep 22 23:21:38.707 INFO Checking if live repair is needed
24422 Sep 22 23:21:38.707 INFO No Live Repair required at this time
24423 Sep 22 23:21:38.725 INFO accepted connection from 127.0.0.1:36068, task: main
24424 Sep 22 23:21:38.725 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 looper connected, looper: 0
24425 Sep 22 23:21:38.725 INFO [0] Proc runs for 127.0.0.1:47135 in state Replaced
24426 Sep 22 23:21:38.725 INFO Connection request from f712941d-0577-43e4-8a2e-7814ec270c09 with version 4, task: proc
24427 Sep 22 23:21:38.725 INFO upstairs UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } connected, version 4, task: proc
24428 Sep 22 23:21:38.725 INFO [0] upstairs guest_io_ready=TRUE, promote! session 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae
24429 Sep 22 23:21:38.726 INFO UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } is now active (read-write)
24430 Sep 22 23:21:38.726 INFO [0] downstairs client at 127.0.0.1:47135 has UUID de5d1b92-54ba-463b-93eb-b2678e77643b
24431 Sep 22 23:21:38.726 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: de5d1b92-54ba-463b-93eb-b2678e77643b, encrypted: true, database_read_version: 1, database_write_version: 1 }
24432 Sep 22 23:21:38.726 WARN [0] replace downstairs uuid:ca34832d-be06-44a7-9364-f6c3bc44d7fb with de5d1b92-54ba-463b-93eb-b2678e77643b
24433 Sep 22 23:21:38.726 INFO f712941d-0577-43e4-8a2e-7814ec270c09 Replaced Active Active
24434 Sep 22 23:21:38.726 INFO Current flush_numbers [0..12]: [0, 0]
24435 Sep 22 23:21:38.727 INFO Downstairs has completed Negotiation, task: proc
24436 Sep 22 23:21:38.727 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) Replaced Active Active ds_transition to LiveRepairReady
24437 Sep 22 23:21:38.727 INFO [0] Transition from Replaced to LiveRepairReady
24438 Sep 22 23:21:38.727 WARN [0] new RM replaced this: None
24439 Sep 22 23:21:38.727 WARN [0] f712941d-0577-43e4-8a2e-7814ec270c09 Enter Ready for LiveRepair mode
24440 Sep 22 23:21:38.727 INFO [0] Starts cmd_loop
24441 Sep 22 23:21:38.727 INFO [0] 127.0.0.1:47135 task reports connection:true
24442 Sep 22 23:21:38.727 INFO f712941d-0577-43e4-8a2e-7814ec270c09 LiveRepairReady Active Active
24443 Sep 22 23:21:38.727 INFO Set check for repair
24444 Sep 22 23:21:38.745 INFO UUID: e8aacc37-8406-4fc6-b948-447b29fd08c0
24445 Sep 22 23:21:38.745 INFO Blocks per extent:512 Total Extents: 188
24446 Sep 22 23:21:38.745 INFO Crucible Version: Crucible Version: 0.0.1
24447 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24448 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24449 rustc: 1.70.0 stable x86_64-unknown-illumos
24450 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24451 Sep 22 23:21:38.745 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24452 Sep 22 23:21:38.745 INFO Using address: 127.0.0.1:36532, task: main
24453 Sep 22 23:21:38.746 INFO Repair listens on 127.0.0.1:0, task: repair
24454 Sep 22 23:21:38.746 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59354, task: repair
24455 Sep 22 23:21:38.746 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59354, task: repair
24456 Sep 22 23:21:38.746 INFO listening, local_addr: 127.0.0.1:59354, task: repair
24457 Sep 22 23:21:38.746 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59354, task: repair
24458 Sep 22 23:21:38.746 INFO Using repair address: 127.0.0.1:59354, task: main
24459 Sep 22 23:21:38.746 INFO No SSL acceptor configured, task: main
24460 Sep 22 23:21:38.771 INFO listening on 127.0.0.1:0, task: main
24461 Sep 22 23:21:38.771 INFO current number of open files limit 65536 is already the maximum
24462 Sep 22 23:21:38.772 INFO Opened existing region file "/tmp/downstairs-hwOOuWZz/region.json"
24463 Sep 22 23:21:38.772 INFO Database read version 1
24464 Sep 22 23:21:38.772 INFO Database write version 1
24465 Sep 22 23:21:38.821 INFO current number of open files limit 65536 is already the maximum
24466 Sep 22 23:21:38.821 INFO Opened existing region file "/tmp/downstairs-rDPgnXdu/region.json"
24467 Sep 22 23:21:38.821 INFO Database read version 1
24468 Sep 22 23:21:38.821 INFO Database write version 1
24469 Sep 22 23:21:38.828 INFO current number of open files limit 65536 is already the maximum
24470 Sep 22 23:21:38.829 INFO Created new region file "/tmp/downstairs-t7mPFuJ0/region.json"
24471 Sep 22 23:21:38.896 INFO UUID: 6e2bf597-efa8-46d7-84b3-855cd0183cc5
24472 Sep 22 23:21:38.896 INFO Blocks per extent:512 Total Extents: 188
24473 Sep 22 23:21:38.896 INFO Crucible Version: Crucible Version: 0.0.1
24474 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24475 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24476 rustc: 1.70.0 stable x86_64-unknown-illumos
24477 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24478 Sep 22 23:21:38.896 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24479 Sep 22 23:21:38.896 INFO Using address: 127.0.0.1:33519, task: main
24480 Sep 22 23:21:38.896 INFO Repair listens on 127.0.0.1:0, task: repair
24481 Sep 22 23:21:38.897 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44104, task: repair
24482 Sep 22 23:21:38.897 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44104, task: repair
24483 Sep 22 23:21:38.897 INFO listening, local_addr: 127.0.0.1:44104, task: repair
24484 Sep 22 23:21:38.897 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44104, task: repair
24485 Sep 22 23:21:38.897 INFO Using repair address: 127.0.0.1:44104, task: main
24486 Sep 22 23:21:38.897 INFO No SSL acceptor configured, task: main
24487 Sep 22 23:21:38.938 INFO UUID: aa05d4f9-364f-4612-a579-613b38785c79
24488 Sep 22 23:21:38.938 INFO Blocks per extent:512 Total Extents: 188
24489 Sep 22 23:21:38.938 INFO Crucible Version: Crucible Version: 0.0.1
24490 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24491 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24492 rustc: 1.70.0 stable x86_64-unknown-illumos
24493 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24494 Sep 22 23:21:38.938 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24495 Sep 22 23:21:38.938 INFO Using address: 127.0.0.1:37108, task: main
24496 Sep 22 23:21:38.938 INFO Repair listens on 127.0.0.1:0, task: repair
24497 Sep 22 23:21:38.939 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53054, task: repair
24498 Sep 22 23:21:38.939 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53054, task: repair
24499 Sep 22 23:21:38.939 INFO listening, local_addr: 127.0.0.1:53054, task: repair
24500 Sep 22 23:21:38.939 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53054, task: repair
24501 Sep 22 23:21:38.939 INFO Using repair address: 127.0.0.1:53054, task: main
24502 Sep 22 23:21:38.939 INFO No SSL acceptor configured, task: main
24503 Sep 22 23:21:38.953 INFO current number of open files limit 65536 is already the maximum
24504 Sep 22 23:21:38.953 INFO Opened existing region file "/tmp/downstairs-wuCOh34x/region.json"
24505 Sep 22 23:21:38.953 INFO Database read version 1
24506 Sep 22 23:21:38.953 INFO Database write version 1
24507 Sep 22 23:21:38.968 INFO Checking if live repair is needed
24508 Sep 22 23:21:38.968 INFO No Live Repair required at this time
24509 Sep 22 23:21:38.969 INFO current number of open files limit 65536 is already the maximum
24510 Sep 22 23:21:38.969 INFO Created new region file "/tmp/downstairs-VXdeuER3/region.json"
24511 Sep 22 23:21:38.991 INFO accepted connection from 127.0.0.1:51097, task: main
24512 Sep 22 23:21:38.991 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f looper connected, looper: 0
24513 Sep 22 23:21:38.992 INFO [0] Proc runs for 127.0.0.1:62901 in state Replaced
24514 Sep 22 23:21:38.992 INFO Connection request from 52fe8d40-333d-4b64-8663-2adf0476947f with version 4, task: proc
24515 Sep 22 23:21:38.992 INFO upstairs UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } connected, version 4, task: proc
24516 Sep 22 23:21:38.992 INFO [0] upstairs guest_io_ready=TRUE, promote! session fc0a5f14-ae91-4f48-aded-c2a3fecef8a0
24517 Sep 22 23:21:38.992 INFO UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } is now active (read-write)
24518 Sep 22 23:21:38.992 INFO [0] downstairs client at 127.0.0.1:62901 has UUID f9881bf1-86b5-438d-b94a-613d437493c1
24519 Sep 22 23:21:38.992 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f9881bf1-86b5-438d-b94a-613d437493c1, encrypted: true, database_read_version: 1, database_write_version: 1 }
24520 Sep 22 23:21:38.992 WARN [0] replace downstairs uuid:da052c76-0745-44df-96e3-d0a8c019994b with f9881bf1-86b5-438d-b94a-613d437493c1
24521 Sep 22 23:21:38.992 INFO 52fe8d40-333d-4b64-8663-2adf0476947f Replaced Active Active
24522 Sep 22 23:21:38.993 INFO Current flush_numbers [0..12]: [0, 0]
24523 Sep 22 23:21:38.993 INFO Downstairs has completed Negotiation, task: proc
24524 Sep 22 23:21:38.993 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) Replaced Active Active ds_transition to LiveRepairReady
24525 Sep 22 23:21:38.993 INFO [0] Transition from Replaced to LiveRepairReady
24526 Sep 22 23:21:38.993 WARN [0] new RM replaced this: None
24527 Sep 22 23:21:38.993 WARN [0] 52fe8d40-333d-4b64-8663-2adf0476947f Enter Ready for LiveRepair mode
24528 Sep 22 23:21:38.993 INFO [0] Starts cmd_loop
24529 Sep 22 23:21:38.994 INFO [0] 127.0.0.1:62901 task reports connection:true
24530 Sep 22 23:21:38.994 INFO 52fe8d40-333d-4b64-8663-2adf0476947f LiveRepairReady Active Active
24531 Sep 22 23:21:38.994 INFO Set check for repair
24532 Sep 22 23:21:38.995 INFO current number of open files limit 65536 is already the maximum
24533 Sep 22 23:21:38.995 INFO Created new region file "/tmp/downstairs-UGCoHqWo/region.json"
24534 Sep 22 23:21:39.085 INFO UUID: 0b1643bf-b244-4616-9246-4922ec6009af
24535 Sep 22 23:21:39.085 INFO Blocks per extent:512 Total Extents: 188
24536 Sep 22 23:21:39.085 INFO Crucible Version: Crucible Version: 0.0.1
24537 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24538 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24539 rustc: 1.70.0 stable x86_64-unknown-illumos
24540 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24541 Sep 22 23:21:39.085 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24542 Sep 22 23:21:39.085 INFO Using address: 127.0.0.1:48339, task: main
24543 Sep 22 23:21:39.085 INFO Repair listens on 127.0.0.1:0, task: repair
24544 Sep 22 23:21:39.085 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52864, task: repair
24545 Sep 22 23:21:39.086 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52864, task: repair
24546 Sep 22 23:21:39.086 INFO listening, local_addr: 127.0.0.1:52864, task: repair
24547 Sep 22 23:21:39.086 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52864, task: repair
24548 Sep 22 23:21:39.086 INFO Using repair address: 127.0.0.1:52864, task: main
24549 Sep 22 23:21:39.086 INFO No SSL acceptor configured, task: main
24550 Sep 22 23:21:39.120 INFO listening on 127.0.0.1:0, task: main
24551 Sep 22 23:21:39.121 INFO current number of open files limit 65536 is already the maximum
24552 Sep 22 23:21:39.121 INFO Opened existing region file "/tmp/downstairs-t7mPFuJ0/region.json"
24553 Sep 22 23:21:39.121 INFO Database read version 1
24554 Sep 22 23:21:39.121 INFO Database write version 1
24555 Sep 22 23:21:39.149 INFO UUID: b740b481-b810-4686-a6ad-5e7a129e9669
24556 Sep 22 23:21:39.150 INFO Blocks per extent:512 Total Extents: 188
24557 Sep 22 23:21:39.150 INFO Crucible Version: Crucible Version: 0.0.1
24558 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24559 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24560 rustc: 1.70.0 stable x86_64-unknown-illumos
24561 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24562 Sep 22 23:21:39.150 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24563 Sep 22 23:21:39.150 INFO Using address: 127.0.0.1:55664, task: main
24564 Sep 22 23:21:39.150 INFO Repair listens on 127.0.0.1:0, task: repair
24565 Sep 22 23:21:39.150 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40259, task: repair
24566 Sep 22 23:21:39.150 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40259, task: repair
24567 Sep 22 23:21:39.150 INFO listening, local_addr: 127.0.0.1:40259, task: repair
24568 Sep 22 23:21:39.151 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40259, task: repair
24569 Sep 22 23:21:39.151 INFO Using repair address: 127.0.0.1:40259, task: main
24570 Sep 22 23:21:39.151 INFO No SSL acceptor configured, task: main
24571 Sep 22 23:21:39.161 INFO current number of open files limit 65536 is already the maximum
24572 Sep 22 23:21:39.161 INFO Created new region file "/tmp/downstairs-zy0O1MvL/region.json"
24573 Sep 22 23:21:39.234 INFO current number of open files limit 65536 is already the maximum
24574 Sep 22 23:21:39.234 INFO Created new region file "/tmp/downstairs-pHcECflQ/region.json"
24575 Sep 22 23:21:39.243 INFO listening on 127.0.0.1:0, task: main
24576 Sep 22 23:21:39.244 INFO current number of open files limit 65536 is already the maximum
24577 Sep 22 23:21:39.244 INFO Opened existing region file "/tmp/downstairs-VXdeuER3/region.json"
24578 Sep 22 23:21:39.244 INFO Database read version 1
24579 Sep 22 23:21:39.244 INFO Database write version 1
24580 Sep 22 23:21:39.247 INFO UUID: 1f0dc9eb-57e9-42b1-a5c8-0ed1810eb444
24581 Sep 22 23:21:39.247 INFO Blocks per extent:512 Total Extents: 188
24582 Sep 22 23:21:39.247 INFO Crucible Version: Crucible Version: 0.0.1
24583 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24584 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24585 rustc: 1.70.0 stable x86_64-unknown-illumos
24586 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24587 Sep 22 23:21:39.247 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24588 Sep 22 23:21:39.247 INFO Using address: 127.0.0.1:40057, task: main
24589 Sep 22 23:21:39.248 INFO Repair listens on 127.0.0.1:0, task: repair
24590 Sep 22 23:21:39.248 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59363, task: repair
24591 Sep 22 23:21:39.248 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59363, task: repair
24592 Sep 22 23:21:39.248 INFO listening, local_addr: 127.0.0.1:59363, task: repair
24593 Sep 22 23:21:39.248 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59363, task: repair
24594 Sep 22 23:21:39.248 INFO Using repair address: 127.0.0.1:59363, task: main
24595 Sep 22 23:21:39.248 INFO No SSL acceptor configured, task: main
24596 Sep 22 23:21:39.281 INFO listening on 127.0.0.1:0, task: main
24597 Sep 22 23:21:39.282 INFO current number of open files limit 65536 is already the maximum
24598 Sep 22 23:21:39.282 INFO Opened existing region file "/tmp/downstairs-UGCoHqWo/region.json"
24599 Sep 22 23:21:39.282 INFO Database read version 1
24600 Sep 22 23:21:39.282 INFO Database write version 1
24601 Sep 22 23:21:39.326 INFO current number of open files limit 65536 is already the maximum
24602 Sep 22 23:21:39.326 INFO Created new region file "/tmp/downstairs-sK3WzHBh/region.json"
24603 Sep 22 23:21:39.478 INFO UUID: 213a7130-8159-4409-8d01-9d7ac9bbe737
24604 Sep 22 23:21:39.478 INFO Blocks per extent:512 Total Extents: 188
24605 Sep 22 23:21:39.478 INFO Crucible Version: Crucible Version: 0.0.1
24606 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24607 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24608 rustc: 1.70.0 stable x86_64-unknown-illumos
24609 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24610 Sep 22 23:21:39.478 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24611 Sep 22 23:21:39.478 INFO Using address: 127.0.0.1:33670, task: main
24612 Sep 22 23:21:39.479 INFO Repair listens on 127.0.0.1:0, task: repair
24613 Sep 22 23:21:39.479 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49396, task: repair
24614 Sep 22 23:21:39.479 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49396, task: repair
24615 Sep 22 23:21:39.479 INFO listening, local_addr: 127.0.0.1:49396, task: repair
24616 Sep 22 23:21:39.479 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49396, task: repair
24617 Sep 22 23:21:39.479 INFO Using repair address: 127.0.0.1:49396, task: main
24618 Sep 22 23:21:39.479 INFO No SSL acceptor configured, task: main
24619 Sep 22 23:21:39.497 INFO listening on 127.0.0.1:0, task: main
24620 Sep 22 23:21:39.498 INFO current number of open files limit 65536 is already the maximum
24621 Sep 22 23:21:39.498 INFO Opened existing region file "/tmp/downstairs-zy0O1MvL/region.json"
24622 Sep 22 23:21:39.498 INFO Database read version 1
24623 Sep 22 23:21:39.498 INFO Database write version 1
24624 Sep 22 23:21:39.567 INFO listening on 127.0.0.1:0, task: main
24625 Sep 22 23:21:39.567 INFO current number of open files limit 65536 is already the maximum
24626 Sep 22 23:21:39.567 INFO Opened existing region file "/tmp/downstairs-pHcECflQ/region.json"
24627 Sep 22 23:21:39.567 INFO Database read version 1
24628 Sep 22 23:21:39.567 INFO Database write version 1
24629 Sep 22 23:21:39.571 INFO Upstairs starts
24630 Sep 22 23:21:39.571 INFO Crucible Version: BuildInfo {
24631 version: "0.0.1",
24632 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
24633 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
24634 git_branch: "main",
24635 rustc_semver: "1.70.0",
24636 rustc_channel: "stable",
24637 rustc_host_triple: "x86_64-unknown-illumos",
24638 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
24639 cargo_triple: "x86_64-unknown-illumos",
24640 debug: true,
24641 opt_level: 0,
24642 }
24643 Sep 22 23:21:39.571 INFO Upstairs <-> Downstairs Message Version: 4
24644 Sep 22 23:21:39.571 INFO Crucible stats registered with UUID: b937f86b-985c-4e08-8b98-07f7aae5f860
24645 Sep 22 23:21:39.571 INFO Crucible b937f86b-985c-4e08-8b98-07f7aae5f860 has session id: 62cf098e-ed8c-438a-a15a-59c67d1d98af
24646 Sep 22 23:21:39.572 INFO listening on 127.0.0.1:0, task: main
24647 Sep 22 23:21:39.572 INFO [0] connecting to 127.0.0.1:36172, looper: 0
24648 Sep 22 23:21:39.572 INFO [1] connecting to 127.0.0.1:36532, looper: 1
24649 Sep 22 23:21:39.572 INFO [2] connecting to 127.0.0.1:33670, looper: 2
24650 Sep 22 23:21:39.572 INFO up_listen starts, task: up_listen
24651 Sep 22 23:21:39.572 INFO Wait for all three downstairs to come online
24652 Sep 22 23:21:39.572 INFO Flush timeout: 0.5
24653 Sep 22 23:21:39.572 INFO accepted connection from 127.0.0.1:63095, task: main
24654 Sep 22 23:21:39.572 INFO accepted connection from 127.0.0.1:33942, task: main
24655 Sep 22 23:21:39.572 INFO accepted connection from 127.0.0.1:47489, task: main
24656 Sep 22 23:21:39.572 INFO [0] b937f86b-985c-4e08-8b98-07f7aae5f860 looper connected, looper: 0
24657 Sep 22 23:21:39.573 INFO [0] Proc runs for 127.0.0.1:36172 in state New
24658 Sep 22 23:21:39.573 INFO [1] b937f86b-985c-4e08-8b98-07f7aae5f860 looper connected, looper: 1
24659 Sep 22 23:21:39.573 INFO [1] Proc runs for 127.0.0.1:36532 in state New
24660 Sep 22 23:21:39.573 INFO [2] b937f86b-985c-4e08-8b98-07f7aae5f860 looper connected, looper: 2
24661 Sep 22 23:21:39.573 INFO [2] Proc runs for 127.0.0.1:33670 in state New
24662 Sep 22 23:21:39.573 INFO Connection request from b937f86b-985c-4e08-8b98-07f7aae5f860 with version 4, task: proc
24663 Sep 22 23:21:39.573 INFO upstairs UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } connected, version 4, task: proc
24664 Sep 22 23:21:39.573 INFO Connection request from b937f86b-985c-4e08-8b98-07f7aae5f860 with version 4, task: proc
24665 Sep 22 23:21:39.573 INFO upstairs UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } connected, version 4, task: proc
24666 Sep 22 23:21:39.573 INFO Connection request from b937f86b-985c-4e08-8b98-07f7aae5f860 with version 4, task: proc
24667 Sep 22 23:21:39.573 INFO upstairs UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } connected, version 4, task: proc
24668 The guest has requested activation
24669 Sep 22 23:21:39.573 INFO [0] b937f86b-985c-4e08-8b98-07f7aae5f860 (27d81b06-bab5-4e4f-b4d3-4909cf4b2397) New New New ds_transition to WaitActive
24670 Sep 22 23:21:39.573 INFO [0] Transition from New to WaitActive
24671 Sep 22 23:21:39.573 INFO [1] b937f86b-985c-4e08-8b98-07f7aae5f860 (27d81b06-bab5-4e4f-b4d3-4909cf4b2397) WaitActive New New ds_transition to WaitActive
24672 Sep 22 23:21:39.573 INFO [1] Transition from New to WaitActive
24673 Sep 22 23:21:39.574 INFO [2] b937f86b-985c-4e08-8b98-07f7aae5f860 (27d81b06-bab5-4e4f-b4d3-4909cf4b2397) WaitActive WaitActive New ds_transition to WaitActive
24674 Sep 22 23:21:39.574 INFO [2] Transition from New to WaitActive
24675 Sep 22 23:21:39.574 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 active request set
24676 Sep 22 23:21:39.574 INFO [0] received activate with gen 1
24677 Sep 22 23:21:39.574 INFO [0] client got ds_active_rx, promote! session 27d81b06-bab5-4e4f-b4d3-4909cf4b2397
24678 Sep 22 23:21:39.574 INFO [1] received activate with gen 1
24679 Sep 22 23:21:39.574 INFO [1] client got ds_active_rx, promote! session 27d81b06-bab5-4e4f-b4d3-4909cf4b2397
24680 Sep 22 23:21:39.574 INFO [2] received activate with gen 1
24681 Sep 22 23:21:39.574 INFO [2] client got ds_active_rx, promote! session 27d81b06-bab5-4e4f-b4d3-4909cf4b2397
24682 Sep 22 23:21:39.574 INFO UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } is now active (read-write)
24683 Sep 22 23:21:39.574 INFO UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } is now active (read-write)
24684 Sep 22 23:21:39.575 INFO UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } is now active (read-write)
24685 Sep 22 23:21:39.575 INFO [0] downstairs client at 127.0.0.1:36172 has UUID d1402af7-f301-4500-84d1-bcd26b1c483b
24686 Sep 22 23:21:39.575 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: d1402af7-f301-4500-84d1-bcd26b1c483b, encrypted: true, database_read_version: 1, database_write_version: 1 }
24687 Sep 22 23:21:39.575 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 WaitActive WaitActive WaitActive
24688 Sep 22 23:21:39.575 INFO [1] downstairs client at 127.0.0.1:36532 has UUID e8aacc37-8406-4fc6-b948-447b29fd08c0
24689 Sep 22 23:21:39.575 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: e8aacc37-8406-4fc6-b948-447b29fd08c0, encrypted: true, database_read_version: 1, database_write_version: 1 }
24690 Sep 22 23:21:39.575 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 WaitActive WaitActive WaitActive
24691 Sep 22 23:21:39.575 INFO [2] downstairs client at 127.0.0.1:33670 has UUID 213a7130-8159-4409-8d01-9d7ac9bbe737
24692 Sep 22 23:21:39.575 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 213a7130-8159-4409-8d01-9d7ac9bbe737, encrypted: true, database_read_version: 1, database_write_version: 1 }
24693 Sep 22 23:21:39.575 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 WaitActive WaitActive WaitActive
24694 Sep 22 23:21:39.598 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24695 Sep 22 23:21:39.601 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24696 Sep 22 23:21:39.605 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24697 Sep 22 23:21:39.615 INFO UUID: 13d4e6fc-362c-415f-bc7d-909deb752536
24698 Sep 22 23:21:39.615 INFO Blocks per extent:512 Total Extents: 188
24699 Sep 22 23:21:39.615 INFO Crucible Version: Crucible Version: 0.0.1
24700 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24701 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24702 rustc: 1.70.0 stable x86_64-unknown-illumos
24703 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24704 Sep 22 23:21:39.615 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24705 Sep 22 23:21:39.615 INFO Using address: 127.0.0.1:52265, task: main
24706 Sep 22 23:21:39.615 INFO Repair listens on 127.0.0.1:0, task: repair
24707 Sep 22 23:21:39.615 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37101, task: repair
24708 Sep 22 23:21:39.615 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37101, task: repair
24709 Sep 22 23:21:39.615 INFO listening, local_addr: 127.0.0.1:37101, task: repair
24710 Sep 22 23:21:39.615 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37101, task: repair
24711 Sep 22 23:21:39.615 INFO Using repair address: 127.0.0.1:37101, task: main
24712 Sep 22 23:21:39.615 INFO No SSL acceptor configured, task: main
24713 Sep 22 23:21:39.636 INFO UUID: 9c7d4f0a-db95-4538-bb7e-7b1ed1936555
24714 Sep 22 23:21:39.636 INFO Blocks per extent:512 Total Extents: 188
24715 Sep 22 23:21:39.636 INFO Crucible Version: Crucible Version: 0.0.1
24716 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24717 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24718 rustc: 1.70.0 stable x86_64-unknown-illumos
24719 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24720 Sep 22 23:21:39.636 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24721 Sep 22 23:21:39.636 INFO Using address: 127.0.0.1:35718, task: main
24722 Sep 22 23:21:39.636 INFO Repair listens on 127.0.0.1:0, task: repair
24723 Sep 22 23:21:39.636 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40347, task: repair
24724 Sep 22 23:21:39.636 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40347, task: repair
24725 Sep 22 23:21:39.636 INFO listening, local_addr: 127.0.0.1:40347, task: repair
24726 Sep 22 23:21:39.636 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40347, task: repair
24727 Sep 22 23:21:39.636 INFO Using repair address: 127.0.0.1:40347, task: main
24728 Sep 22 23:21:39.636 INFO No SSL acceptor configured, task: main
24729 Sep 22 23:21:39.656 INFO Downstairs has completed Negotiation, task: proc
24730 Sep 22 23:21:39.658 INFO Downstairs has completed Negotiation, task: proc
24731 Sep 22 23:21:39.660 INFO Downstairs has completed Negotiation, task: proc
24732 Sep 22 23:21:39.660 INFO [0] b937f86b-985c-4e08-8b98-07f7aae5f860 (27d81b06-bab5-4e4f-b4d3-4909cf4b2397) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
24733 Sep 22 23:21:39.660 INFO [0] Transition from WaitActive to WaitQuorum
24734 Sep 22 23:21:39.660 WARN [0] new RM replaced this: None
24735 Sep 22 23:21:39.660 INFO [0] Starts reconcile loop
24736 Sep 22 23:21:39.661 INFO [1] b937f86b-985c-4e08-8b98-07f7aae5f860 (27d81b06-bab5-4e4f-b4d3-4909cf4b2397) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
24737 Sep 22 23:21:39.661 INFO [1] Transition from WaitActive to WaitQuorum
24738 Sep 22 23:21:39.661 WARN [1] new RM replaced this: None
24739 Sep 22 23:21:39.661 INFO [1] Starts reconcile loop
24740 Sep 22 23:21:39.661 INFO [2] b937f86b-985c-4e08-8b98-07f7aae5f860 (27d81b06-bab5-4e4f-b4d3-4909cf4b2397) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
24741 Sep 22 23:21:39.661 INFO [2] Transition from WaitActive to WaitQuorum
24742 Sep 22 23:21:39.661 WARN [2] new RM replaced this: None
24743 Sep 22 23:21:39.661 INFO [2] Starts reconcile loop
24744 Sep 22 23:21:39.661 INFO [0] 127.0.0.1:36172 task reports connection:true
24745 Sep 22 23:21:39.661 INFO listening on 127.0.0.1:0, task: main
24746 Sep 22 23:21:39.661 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 WaitQuorum WaitQuorum WaitQuorum
24747 Sep 22 23:21:39.661 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24748 Sep 22 23:21:39.661 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24749 Sep 22 23:21:39.661 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
24750 Sep 22 23:21:39.661 INFO [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24751 Sep 22 23:21:39.661 INFO [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24752 Sep 22 23:21:39.661 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
24753 Sep 22 23:21:39.661 INFO [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24754 Sep 22 23:21:39.661 INFO [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24755 Sep 22 23:21:39.661 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
24756 Sep 22 23:21:39.661 INFO Max found gen is 1
24757 Sep 22 23:21:39.661 INFO Generation requested: 1 >= found:1
24758 Sep 22 23:21:39.661 INFO Next flush: 1
24759 Sep 22 23:21:39.661 INFO All extents match
24760 Sep 22 23:21:39.661 INFO No downstairs repair required
24761 Sep 22 23:21:39.661 INFO No initial repair work was required
24762 Sep 22 23:21:39.661 INFO Set Downstairs and Upstairs active
24763 Sep 22 23:21:39.661 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 is now active with session: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397
24764 Sep 22 23:21:39.661 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 Set Active after no repair
24765 Sep 22 23:21:39.661 INFO Notify all downstairs, region set compare is done.
24766 Sep 22 23:21:39.661 INFO current number of open files limit 65536 is already the maximum
24767 Sep 22 23:21:39.661 INFO Opened existing region file "/tmp/downstairs-sK3WzHBh/region.json"
24768 Sep 22 23:21:39.661 INFO Set check for repair
24769 Sep 22 23:21:39.661 INFO Database read version 1
24770 Sep 22 23:21:39.662 INFO Database write version 1
24771 Sep 22 23:21:39.662 INFO [1] 127.0.0.1:36532 task reports connection:true
24772 Sep 22 23:21:39.662 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 Active Active Active
24773 Sep 22 23:21:39.662 INFO Set check for repair
24774 Sep 22 23:21:39.662 INFO [2] 127.0.0.1:33670 task reports connection:true
24775 Sep 22 23:21:39.662 INFO b937f86b-985c-4e08-8b98-07f7aae5f860 Active Active Active
24776 Sep 22 23:21:39.662 INFO Set check for repair
24777 Sep 22 23:21:39.662 INFO [0] received reconcile message
24778 Sep 22 23:21:39.662 INFO [0] All repairs completed, exit
24779 Sep 22 23:21:39.662 INFO [0] Starts cmd_loop
24780 Sep 22 23:21:39.662 INFO [1] received reconcile message
24781 Sep 22 23:21:39.662 INFO [1] All repairs completed, exit
24782 Sep 22 23:21:39.662 INFO [1] Starts cmd_loop
24783 Sep 22 23:21:39.662 INFO [2] received reconcile message
24784 Sep 22 23:21:39.662 INFO [2] All repairs completed, exit
24785 Sep 22 23:21:39.662 INFO [2] Starts cmd_loop
24786 The guest has finished waiting for activation
24787 Sep 22 23:21:39.673 INFO current number of open files limit 65536 is already the maximum
24788 Sep 22 23:21:39.673 INFO Created new region file "/tmp/downstairs-lohaNZJ5/region.json"
24789 Sep 22 23:21:39.681 INFO current number of open files limit 65536 is already the maximum
24790 Sep 22 23:21:39.681 INFO Created new region file "/tmp/downstairs-Qt3syeMq/region.json"
24791 Sep 22 23:21:39.723 WARN f712941d-0577-43e4-8a2e-7814ec270c09 request to replace downstairs 127.0.0.1:46295 with 127.0.0.1:47135
24792 Sep 22 23:21:39.724 INFO f712941d-0577-43e4-8a2e-7814ec270c09 found new target: 127.0.0.1:47135 at 0
24793 Sep 22 23:21:39.724 INFO Waiting for replacement to finish
24794 Sep 22 23:21:39.729 INFO Checking if live repair is needed
24795 Sep 22 23:21:39.729 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) LiveRepairReady Active Active ds_transition to LiveRepair
24796 Sep 22 23:21:39.729 INFO [0] Transition from LiveRepairReady to LiveRepair
24797 Sep 22 23:21:39.729 INFO Live Repair started
24798 Sep 22 23:21:39.729 WARN Live Repair main task begins., task: repair
24799 Sep 22 23:21:39.729 INFO Start Live Repair of extents 0 to 2, task: repair
24800 Sep 22 23:21:39.729 INFO Start extent 0 repair, task: repair
24801 Sep 22 23:21:39.729 DEBG RE:0 Repair extent begins
24802 Sep 22 23:21:39.729 DEBG Create new job ids for 0, : downstairs
24803 Sep 22 23:21:39.729 INFO RE:0 repair extent with ids 1002,1003,1004,1005 deps:[]
24804 Sep 22 23:21:39.729 DEBG Enqueue repair job 1005, : downstairs
24805 Sep 22 23:21:39.729 DEBG Enqueue repair job 1002, : downstairs
24806 Sep 22 23:21:39.730 INFO RE:0 close id:1002 queued, notify DS
24807 Sep 22 23:21:39.730 INFO RE:0 Wait for result from close command 1002:3
24808 Sep 22 23:21:39.730 DEBG [0] 1002 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
24809 Sep 22 23:21:39.730 DEBG [0] 1002 Remove check < min repaired:1002 from deps:[], : downstairs
24810 Sep 22 23:21:39.730 INFO [0] 1002 final dependency list [], : downstairs
24811 Sep 22 23:21:39.730 DEBG [0] 1005 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
24812 Sep 22 23:21:39.730 DEBG [0] 1005 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
24813 Sep 22 23:21:39.730 INFO [0] 1005 final dependency list [JobId(1002), JobId(1003), JobId(1004)], : downstairs
24814 Sep 22 23:21:39.731 DEBG Flush just extent 0 with f:2 and g:1
24815 Sep 22 23:21:39.732 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
24816 Sep 22 23:21:39.732 DEBG Flush just extent 0 with f:2 and g:1
24817 Sep 22 23:21:39.733 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
24818 Sep 22 23:21:39.734 DEBG JustClose :1002 extent 0 deps:[] res:true
24819 Sep 22 23:21:39.734 DEBG [1] ELC got g:1 f:1 d:false
24820 Sep 22 23:21:39.734 DEBG [2] ELC got g:1 f:1 d:false
24821 Sep 22 23:21:39.734 DEBG [0] ELC got g:0 f:0 d:false
24822 Sep 22 23:21:39.734 DEBG [0] ExtentFlushClose 1002 AckReady, : downstairs
24823 Sep 22 23:21:39.734 DEBG up_ds_listen was notified
24824 Sep 22 23:21:39.734 DEBG up_ds_listen process 1002
24825 Sep 22 23:21:39.734 DEBG [A] ack job 1002:3, : downstairs
24826 Sep 22 23:21:39.734 DEBG up_ds_listen checked 1 jobs, back to waiting
24827 Sep 22 23:21:39.734 DEBG Extent 0 id:1002 Done
24828 Sep 22 23:21:39.735 DEBG Get repair info for 2 source, : downstairs
24829 Sep 22 23:21:39.735 DEBG Get repair info for 0 bad, : downstairs
24830 Sep 22 23:21:39.735 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
24831 Sep 22 23:21:39.735 DEBG Enqueue repair job 1003, : downstairs
24832 Sep 22 23:21:39.735 INFO RE:0 Wait for result from repair command 1003:4
24833 Sep 22 23:21:39.735 DEBG [0] 1003 Remove check skipped:{JobId(1001)} from deps:[JobId(1002)], : downstairs
24834 Sep 22 23:21:39.735 DEBG [0] 1003 Remove check < min repaired:1002 from deps:[JobId(1002)], : downstairs
24835 Sep 22 23:21:39.735 INFO [0] 1003 final dependency list [JobId(1002)], : downstairs
24836 Sep 22 23:21:39.735 DEBG Received NoOP 1003
24837 Sep 22 23:21:39.735 DEBG Received NoOP 1003
24838 Sep 22 23:21:39.735 DEBG Received ExtentLiveRepair 1003
24839 Sep 22 23:21:39.735 DEBG Work of: LiveNoOp 1003
24840 Sep 22 23:21:39.735 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
24841 Sep 22 23:21:39.735 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } waiting on 1 deps, role: work
24842 Sep 22 23:21:39.735 DEBG Work of: LiveNoOp 1003
24843 Sep 22 23:21:39.735 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
24844 Sep 22 23:21:39.736 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } waiting on 1 deps, role: work
24845 Sep 22 23:21:39.736 DEBG ExtentLiveRepair: extent 0 sra:127.0.0.1:39249
24846 Sep 22 23:21:39.736 INFO Created copy dir "/tmp/downstairs-qImf5Xje/00/000/000.copy"
24847 Sep 22 23:21:39.775 INFO UUID: 99fe08ae-482d-4baf-90c5-1e5bdfb86144
24848 Sep 22 23:21:39.775 INFO Blocks per extent:512 Total Extents: 188
24849 Sep 22 23:21:39.775 INFO Crucible Version: Crucible Version: 0.0.1
24850 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24851 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24852 rustc: 1.70.0 stable x86_64-unknown-illumos
24853 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24854 Sep 22 23:21:39.775 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24855 Sep 22 23:21:39.775 INFO Using address: 127.0.0.1:33021, task: main
24856 Sep 22 23:21:39.776 INFO Repair listens on 127.0.0.1:0, task: repair
24857 Sep 22 23:21:39.776 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46198, task: repair
24858 Sep 22 23:21:39.776 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46198, task: repair
24859 Sep 22 23:21:39.776 INFO listening, local_addr: 127.0.0.1:46198, task: repair
24860 Sep 22 23:21:39.776 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46198, task: repair
24861 Sep 22 23:21:39.776 INFO Using repair address: 127.0.0.1:46198, task: main
24862 Sep 22 23:21:39.776 INFO No SSL acceptor configured, task: main
24863 Sep 22 23:21:39.822 INFO accepted connection, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24864 Sep 22 23:21:39.823 TRCE incoming request, uri: /extent/0/files, method: GET, req_id: 1d365637-bb93-4b60-850e-025d1c909172, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24865 Sep 22 23:21:39.823 INFO request completed, latency_us: 339, response_code: 200, uri: /extent/0/files, method: GET, req_id: 1d365637-bb93-4b60-850e-025d1c909172, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24866 Sep 22 23:21:39.824 INFO eid:0 Found repair files: ["000", "000.db"]
24867 Sep 22 23:21:39.824 TRCE incoming request, uri: /newextent/0/data, method: GET, req_id: b54257c3-7bf3-420e-b2ca-082dc62e4f6a, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24868 Sep 22 23:21:39.824 INFO request completed, latency_us: 494, response_code: 200, uri: /newextent/0/data, method: GET, req_id: b54257c3-7bf3-420e-b2ca-082dc62e4f6a, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24869 Sep 22 23:21:39.825 TRCE incoming request, uri: /newextent/0/db, method: GET, req_id: 9ca46789-c153-401c-aaa3-28385442d849, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24870 Sep 22 23:21:39.826 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/0/db, method: GET, req_id: 9ca46789-c153-401c-aaa3-28385442d849, remote_addr: 127.0.0.1:62976, local_addr: 127.0.0.1:39249, task: repair
24871 Sep 22 23:21:39.826 INFO Repair files downloaded, move directory "/tmp/downstairs-qImf5Xje/00/000/000.copy" to "/tmp/downstairs-qImf5Xje/00/000/000.replace"
24872 Sep 22 23:21:39.827 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
24873 Sep 22 23:21:39.828 INFO Copy files from "/tmp/downstairs-qImf5Xje/00/000/000.replace" in "/tmp/downstairs-qImf5Xje/00/000"
24874 Sep 22 23:21:39.828 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000/000"
24875 Sep 22 23:21:39.828 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000/000.db"
24876 Sep 22 23:21:39.828 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
24877 Sep 22 23:21:39.828 INFO Move directory "/tmp/downstairs-qImf5Xje/00/000/000.replace" to "/tmp/downstairs-qImf5Xje/00/000/000.completed"
24878 Sep 22 23:21:39.828 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
24879 Sep 22 23:21:39.828 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
24880 Sep 22 23:21:39.828 DEBG LiveRepair:1003 extent 0 deps:[JobId(1002)] res:true
24881 Sep 22 23:21:39.828 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } waiting on 1 deps, role: work
24882 Sep 22 23:21:39.828 DEBG [0] ExtentLiveRepair AckReady 1003, : downstairs
24883 Sep 22 23:21:39.828 DEBG up_ds_listen was notified
24884 Sep 22 23:21:39.828 DEBG up_ds_listen process 1003
24885 Sep 22 23:21:39.828 DEBG [A] ack job 1003:4, : downstairs
24886 Sep 22 23:21:39.829 DEBG up_ds_listen checked 1 jobs, back to waiting
24887 Sep 22 23:21:39.829 DEBG Extent 0 id:1003 Done
24888 Sep 22 23:21:39.829 DEBG Enqueue repair job 1004, : downstairs
24889 Sep 22 23:21:39.829 INFO RE:0 Wait for result from NoOp command 1004:5
24890 Sep 22 23:21:39.829 DEBG [0] 1004 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003)], : downstairs
24891 Sep 22 23:21:39.829 DEBG [0] 1004 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003)], : downstairs
24892 Sep 22 23:21:39.829 INFO [0] 1004 final dependency list [JobId(1002), JobId(1003)], : downstairs
24893 Sep 22 23:21:39.829 DEBG Received NoOP 1004
24894 Sep 22 23:21:39.829 DEBG Received NoOP 1004
24895 Sep 22 23:21:39.829 DEBG Received NoOP 1004
24896 Sep 22 23:21:39.829 DEBG Work of: LiveNoOp 1004
24897 Sep 22 23:21:39.829 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
24898 Sep 22 23:21:39.830 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
24899 Sep 22 23:21:39.830 DEBG Work of: LiveNoOp 1004
24900 Sep 22 23:21:39.830 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
24901 Sep 22 23:21:39.831 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
24902 Sep 22 23:21:39.832 DEBG Work of: LiveNoOp 1004
24903 Sep 22 23:21:39.832 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
24904 Sep 22 23:21:39.833 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
24905 Sep 22 23:21:39.833 DEBG [0] ExtentLiveNoOp AckReady 1004, : downstairs
24906 Sep 22 23:21:39.833 DEBG [0] ExtentLiveReopen AckReady 1005, : downstairs
24907 Sep 22 23:21:39.833 DEBG up_ds_listen was notified
24908 Sep 22 23:21:39.833 DEBG up_ds_listen process 1004
24909 Sep 22 23:21:39.833 DEBG [A] ack job 1004:5, : downstairs
24910 Sep 22 23:21:39.833 DEBG up_ds_listen process 1005
24911 Sep 22 23:21:39.833 DEBG [A] ack job 1005:6, : downstairs
24912 Sep 22 23:21:39.834 DEBG up_ds_listen checked 2 jobs, back to waiting
24913 Sep 22 23:21:39.834 DEBG up_ds_listen was notified
24914 Sep 22 23:21:39.834 DEBG up_ds_listen checked 0 jobs, back to waiting
24915 Sep 22 23:21:39.834 DEBG Extent 0 id:1004 Done
24916 Sep 22 23:21:39.834 INFO RE:0 Wait for result from reopen command 1005:6
24917 Sep 22 23:21:39.834 DEBG Extent 0 id:1005 Done
24918 Sep 22 23:21:39.834 INFO Start extent 1 repair, task: repair
24919 Sep 22 23:21:39.834 DEBG RE:1 Repair extent begins
24920 Sep 22 23:21:39.834 DEBG Create new job ids for 1, : downstairs
24921 Sep 22 23:21:39.834 INFO RE:1 repair extent with ids 1006,1007,1008,1009 deps:[]
24922 Sep 22 23:21:39.834 DEBG Enqueue repair job 1009, : downstairs
24923 Sep 22 23:21:39.834 DEBG Enqueue repair job 1006, : downstairs
24924 Sep 22 23:21:39.834 INFO RE:1 close id:1006 queued, notify DS
24925 Sep 22 23:21:39.834 INFO RE:1 Wait for result from close command 1006:7
24926 Sep 22 23:21:39.834 DEBG [0] 1006 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
24927 Sep 22 23:21:39.834 DEBG [0] 1006 Remove check < min repaired:1002 from deps:[], : downstairs
24928 Sep 22 23:21:39.834 INFO [0] 1006 final dependency list [], : downstairs
24929 Sep 22 23:21:39.834 DEBG [0] 1009 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
24930 Sep 22 23:21:39.834 DEBG [0] 1009 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
24931 Sep 22 23:21:39.834 INFO [0] 1009 final dependency list [JobId(1006), JobId(1007), JobId(1008)], : downstairs
24932 Sep 22 23:21:39.834 DEBG Flush just extent 1 with f:3 and g:1
24933 Sep 22 23:21:39.835 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
24934 Sep 22 23:21:39.835 DEBG Flush just extent 1 with f:3 and g:1
24935 Sep 22 23:21:39.836 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
24936 Sep 22 23:21:39.837 DEBG JustClose :1006 extent 1 deps:[] res:true
24937 Sep 22 23:21:39.838 DEBG [1] ELC got g:1 f:1 d:false
24938 Sep 22 23:21:39.838 DEBG [2] ELC got g:1 f:1 d:false
24939 Sep 22 23:21:39.838 DEBG [0] ELC got g:0 f:0 d:false
24940 Sep 22 23:21:39.838 DEBG [0] ExtentFlushClose 1006 AckReady, : downstairs
24941 Sep 22 23:21:39.838 DEBG up_ds_listen was notified
24942 Sep 22 23:21:39.838 DEBG up_ds_listen process 1006
24943 Sep 22 23:21:39.838 DEBG [A] ack job 1006:7, : downstairs
24944 Sep 22 23:21:39.838 DEBG up_ds_listen checked 1 jobs, back to waiting
24945 Sep 22 23:21:39.838 DEBG Extent 1 id:1006 Done
24946 Sep 22 23:21:39.838 DEBG Get repair info for 2 source, : downstairs
24947 Sep 22 23:21:39.838 DEBG Get repair info for 0 bad, : downstairs
24948 Sep 22 23:21:39.838 INFO Repair for extent 1 s:2 d:[ClientId(0)], : downstairs
24949 Sep 22 23:21:39.838 DEBG Enqueue repair job 1007, : downstairs
24950 Sep 22 23:21:39.838 INFO RE:1 Wait for result from repair command 1007:8
24951 Sep 22 23:21:39.838 DEBG [0] 1007 Remove check skipped:{JobId(1001)} from deps:[JobId(1006)], : downstairs
24952 Sep 22 23:21:39.838 DEBG [0] 1007 Remove check < min repaired:1002 from deps:[JobId(1006)], : downstairs
24953 Sep 22 23:21:39.838 INFO [0] 1007 final dependency list [JobId(1006)], : downstairs
24954 Sep 22 23:21:39.838 DEBG Received NoOP 1007
24955 Sep 22 23:21:39.838 DEBG Received NoOP 1007
24956 Sep 22 23:21:39.838 DEBG Received ExtentLiveRepair 1007
24957 Sep 22 23:21:39.839 DEBG Work of: LiveNoOp 1007
24958 Sep 22 23:21:39.839 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
24959 Sep 22 23:21:39.839 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } waiting on 1 deps, role: work
24960 Sep 22 23:21:39.839 DEBG Work of: LiveNoOp 1007
24961 Sep 22 23:21:39.839 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
24962 Sep 22 23:21:39.839 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } waiting on 1 deps, role: work
24963 Sep 22 23:21:39.839 DEBG ExtentLiveRepair: extent 1 sra:127.0.0.1:39249
24964 Sep 22 23:21:39.839 INFO Created copy dir "/tmp/downstairs-qImf5Xje/00/000/001.copy"
24965 Sep 22 23:21:39.851 INFO Upstairs starts
24966 Sep 22 23:21:39.851 INFO Crucible Version: BuildInfo {
24967 version: "0.0.1",
24968 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
24969 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
24970 git_branch: "main",
24971 rustc_semver: "1.70.0",
24972 rustc_channel: "stable",
24973 rustc_host_triple: "x86_64-unknown-illumos",
24974 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
24975 cargo_triple: "x86_64-unknown-illumos",
24976 debug: true,
24977 opt_level: 0,
24978 }
24979 Sep 22 23:21:39.851 INFO Upstairs <-> Downstairs Message Version: 4
24980 Sep 22 23:21:39.851 INFO Crucible stats registered with UUID: 9144e02c-c312-47c4-9b1c-f03618834608
24981 Sep 22 23:21:39.851 INFO Crucible 9144e02c-c312-47c4-9b1c-f03618834608 has session id: 20abe3f6-9684-44cd-ad20-faf7fc8fd696
24982 Sep 22 23:21:39.851 INFO listening on 127.0.0.1:0, task: main
24983 Sep 22 23:21:39.851 INFO [0] connecting to 127.0.0.1:52165, looper: 0
24984 Sep 22 23:21:39.851 INFO UUID: efa5c554-210a-4196-815d-fa3d1dfbb858
24985 Sep 22 23:21:39.851 INFO Blocks per extent:512 Total Extents: 188
24986 Sep 22 23:21:39.852 INFO Crucible Version: Crucible Version: 0.0.1
24987 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24988 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24989 rustc: 1.70.0 stable x86_64-unknown-illumos
24990 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24991 Sep 22 23:21:39.852 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24992 Sep 22 23:21:39.852 INFO Using address: 127.0.0.1:37034, task: main
24993 Sep 22 23:21:39.852 INFO [1] connecting to 127.0.0.1:48339, looper: 1
24994 Sep 22 23:21:39.852 INFO [2] connecting to 127.0.0.1:33021, looper: 2
24995 Sep 22 23:21:39.852 INFO up_listen starts, task: up_listen
24996 Sep 22 23:21:39.852 INFO Wait for all three downstairs to come online
24997 Sep 22 23:21:39.852 INFO Flush timeout: 0.5
24998 Sep 22 23:21:39.852 INFO Repair listens on 127.0.0.1:0, task: repair
24999 Sep 22 23:21:39.852 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59864, task: repair
25000 Sep 22 23:21:39.852 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59864, task: repair
25001 Sep 22 23:21:39.852 INFO listening, local_addr: 127.0.0.1:59864, task: repair
25002 Sep 22 23:21:39.852 INFO accepted connection from 127.0.0.1:33221, task: main
25003 Sep 22 23:21:39.852 INFO accepted connection from 127.0.0.1:35948, task: main
25004 Sep 22 23:21:39.852 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59864, task: repair
25005 Sep 22 23:21:39.852 INFO Using repair address: 127.0.0.1:59864, task: main
25006 Sep 22 23:21:39.852 INFO No SSL acceptor configured, task: main
25007 Sep 22 23:21:39.852 INFO accepted connection from 127.0.0.1:45718, task: main
25008 Sep 22 23:21:39.852 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 0
25009 Sep 22 23:21:39.852 INFO [0] Proc runs for 127.0.0.1:52165 in state New
25010 Sep 22 23:21:39.852 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 1
25011 Sep 22 23:21:39.853 INFO [1] Proc runs for 127.0.0.1:48339 in state New
25012 Sep 22 23:21:39.853 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 2
25013 Sep 22 23:21:39.853 INFO [2] Proc runs for 127.0.0.1:33021 in state New
25014 Sep 22 23:21:39.853 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
25015 Sep 22 23:21:39.853 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } connected, version 4, task: proc
25016 Sep 22 23:21:39.853 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
25017 Sep 22 23:21:39.853 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } connected, version 4, task: proc
25018 Sep 22 23:21:39.853 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
25019 Sep 22 23:21:39.853 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } connected, version 4, task: proc
25020 The guest has requested activation
25021 Sep 22 23:21:39.854 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) New New New ds_transition to WaitActive
25022 Sep 22 23:21:39.854 INFO [0] Transition from New to WaitActive
25023 Sep 22 23:21:39.854 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) WaitActive New New ds_transition to WaitActive
25024 Sep 22 23:21:39.854 INFO [1] Transition from New to WaitActive
25025 Sep 22 23:21:39.854 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) WaitActive WaitActive New ds_transition to WaitActive
25026 Sep 22 23:21:39.854 INFO [2] Transition from New to WaitActive
25027 Sep 22 23:21:39.854 INFO 9144e02c-c312-47c4-9b1c-f03618834608 active request set
25028 Sep 22 23:21:39.854 INFO [0] received activate with gen 1
25029 Sep 22 23:21:39.854 INFO [0] client got ds_active_rx, promote! session cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f
25030 Sep 22 23:21:39.854 INFO [1] received activate with gen 1
25031 Sep 22 23:21:39.854 INFO [1] client got ds_active_rx, promote! session cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f
25032 Sep 22 23:21:39.854 INFO [2] received activate with gen 1
25033 Sep 22 23:21:39.854 INFO [2] client got ds_active_rx, promote! session cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f
25034 Sep 22 23:21:39.855 INFO UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } is now active (read-write)
25035 Sep 22 23:21:39.855 INFO UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } is now active (read-write)
25036 Sep 22 23:21:39.855 INFO UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } is now active (read-write)
25037 Sep 22 23:21:39.856 INFO [0] downstairs client at 127.0.0.1:52165 has UUID 63b48fd3-ffb2-4c20-bff7-4a64980fdfc8
25038 Sep 22 23:21:39.856 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 63b48fd3-ffb2-4c20-bff7-4a64980fdfc8, encrypted: true, database_read_version: 1, database_write_version: 1 }
25039 Sep 22 23:21:39.856 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitActive WaitActive WaitActive
25040 Sep 22 23:21:39.856 INFO [1] downstairs client at 127.0.0.1:48339 has UUID 0b1643bf-b244-4616-9246-4922ec6009af
25041 Sep 22 23:21:39.856 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 0b1643bf-b244-4616-9246-4922ec6009af, encrypted: true, database_read_version: 1, database_write_version: 1 }
25042 Sep 22 23:21:39.856 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitActive WaitActive WaitActive
25043 Sep 22 23:21:39.856 INFO [2] downstairs client at 127.0.0.1:33021 has UUID 99fe08ae-482d-4baf-90c5-1e5bdfb86144
25044 Sep 22 23:21:39.856 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 99fe08ae-482d-4baf-90c5-1e5bdfb86144, encrypted: true, database_read_version: 1, database_write_version: 1 }
25045 Sep 22 23:21:39.856 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitActive WaitActive WaitActive
25046 Sep 22 23:21:39.916 INFO current number of open files limit 65536 is already the maximum
25047 Sep 22 23:21:39.916 INFO Created new region file "/tmp/downstairs-6BO8rCTH/region.json"
25048 Sep 22 23:21:39.917 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25049 Sep 22 23:21:39.919 INFO accepted connection, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25050 Sep 22 23:21:39.919 TRCE incoming request, uri: /extent/1/files, method: GET, req_id: c23f1323-c074-47d8-a9d1-bb3a0f448154, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25051 Sep 22 23:21:39.919 INFO request completed, latency_us: 321, response_code: 200, uri: /extent/1/files, method: GET, req_id: c23f1323-c074-47d8-a9d1-bb3a0f448154, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25052 Sep 22 23:21:39.920 INFO eid:1 Found repair files: ["001", "001.db"]
25053 Sep 22 23:21:39.920 TRCE incoming request, uri: /newextent/1/data, method: GET, req_id: b118c6b2-4bdf-44de-905e-b7463c90509c, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25054 Sep 22 23:21:39.921 INFO request completed, latency_us: 419, response_code: 200, uri: /newextent/1/data, method: GET, req_id: b118c6b2-4bdf-44de-905e-b7463c90509c, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25055 Sep 22 23:21:39.922 TRCE incoming request, uri: /newextent/1/db, method: GET, req_id: 5e669d1a-a88e-41a6-ba42-2171b56bd167, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25056 Sep 22 23:21:39.922 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/1/db, method: GET, req_id: 5e669d1a-a88e-41a6-ba42-2171b56bd167, remote_addr: 127.0.0.1:60896, local_addr: 127.0.0.1:39249, task: repair
25057 Sep 22 23:21:39.923 INFO Repair files downloaded, move directory "/tmp/downstairs-qImf5Xje/00/000/001.copy" to "/tmp/downstairs-qImf5Xje/00/000/001.replace"
25058 Sep 22 23:21:39.923 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
25059 Sep 22 23:21:39.924 INFO Copy files from "/tmp/downstairs-qImf5Xje/00/000/001.replace" in "/tmp/downstairs-qImf5Xje/00/000"
25060 Sep 22 23:21:39.924 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000/001"
25061 Sep 22 23:21:39.924 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000/001.db"
25062 Sep 22 23:21:39.924 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
25063 Sep 22 23:21:39.924 INFO Move directory "/tmp/downstairs-qImf5Xje/00/000/001.replace" to "/tmp/downstairs-qImf5Xje/00/000/001.completed"
25064 Sep 22 23:21:39.924 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
25065 Sep 22 23:21:39.924 DEBG fsync completed for: "/tmp/downstairs-qImf5Xje/00/000"
25066 Sep 22 23:21:39.924 DEBG LiveRepair:1007 extent 1 deps:[JobId(1006)] res:true
25067 Sep 22 23:21:39.925 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: f712941d-0577-43e4-8a2e-7814ec270c09, session_id: 512f09eb-7e91-421e-9fa9-f1bb0acbe6ae, gen: 1 } waiting on 1 deps, role: work
25068 Sep 22 23:21:39.925 DEBG [0] ExtentLiveRepair AckReady 1007, : downstairs
25069 Sep 22 23:21:39.925 DEBG up_ds_listen was notified
25070 Sep 22 23:21:39.925 DEBG up_ds_listen process 1007
25071 Sep 22 23:21:39.925 DEBG [A] ack job 1007:8, : downstairs
25072 Sep 22 23:21:39.925 DEBG up_ds_listen checked 1 jobs, back to waiting
25073 Sep 22 23:21:39.925 DEBG Extent 1 id:1007 Done
25074 Sep 22 23:21:39.925 DEBG Enqueue repair job 1008, : downstairs
25075 Sep 22 23:21:39.925 INFO RE:1 Wait for result from NoOp command 1008:9
25076 Sep 22 23:21:39.925 DEBG [0] 1008 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007)], : downstairs
25077 Sep 22 23:21:39.925 DEBG [0] 1008 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007)], : downstairs
25078 Sep 22 23:21:39.925 INFO [0] 1008 final dependency list [JobId(1006), JobId(1007)], : downstairs
25079 Sep 22 23:21:39.926 DEBG Received NoOP 1008
25080 Sep 22 23:21:39.926 DEBG Received NoOP 1008
25081 Sep 22 23:21:39.926 DEBG Received NoOP 1008
25082 Sep 22 23:21:39.926 DEBG Work of: LiveNoOp 1008
25083 Sep 22 23:21:39.926 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25084 Sep 22 23:21:39.927 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25085 Sep 22 23:21:39.927 INFO listening on 127.0.0.1:0, task: main
25086 Sep 22 23:21:39.927 DEBG Work of: LiveNoOp 1008
25087 Sep 22 23:21:39.927 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25088 Sep 22 23:21:39.927 INFO current number of open files limit 65536 is already the maximum
25089 Sep 22 23:21:39.927 INFO Opened existing region file "/tmp/downstairs-lohaNZJ5/region.json"
25090 Sep 22 23:21:39.927 INFO Database read version 1
25091 Sep 22 23:21:39.927 INFO Database write version 1
25092 Sep 22 23:21:39.929 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25093 Sep 22 23:21:39.929 DEBG Work of: LiveNoOp 1008
25094 Sep 22 23:21:39.929 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25095 Sep 22 23:21:39.930 INFO listening on 127.0.0.1:0, task: main
25096 Sep 22 23:21:39.930 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25097 Sep 22 23:21:39.931 DEBG [0] ExtentLiveNoOp AckReady 1008, : downstairs
25098 Sep 22 23:21:39.931 INFO current number of open files limit 65536 is already the maximum
25099 Sep 22 23:21:39.931 INFO Opened existing region file "/tmp/downstairs-Qt3syeMq/region.json"
25100 Sep 22 23:21:39.931 DEBG [0] ExtentLiveReopen AckReady 1009, : downstairs
25101 Sep 22 23:21:39.931 INFO Database read version 1
25102 Sep 22 23:21:39.931 INFO Database write version 1
25103 Sep 22 23:21:39.931 DEBG up_ds_listen was notified
25104 Sep 22 23:21:39.931 DEBG up_ds_listen process 1008
25105 Sep 22 23:21:39.931 DEBG [A] ack job 1008:9, : downstairs
25106 Sep 22 23:21:39.931 DEBG up_ds_listen process 1009
25107 Sep 22 23:21:39.931 DEBG [A] ack job 1009:10, : downstairs
25108 Sep 22 23:21:39.931 DEBG up_ds_listen checked 2 jobs, back to waiting
25109 Sep 22 23:21:39.931 DEBG up_ds_listen was notified
25110 Sep 22 23:21:39.931 DEBG up_ds_listen checked 0 jobs, back to waiting
25111 Sep 22 23:21:39.931 DEBG Extent 1 id:1008 Done
25112 Sep 22 23:21:39.931 INFO RE:1 Wait for result from reopen command 1009:10
25113 Sep 22 23:21:39.931 DEBG Extent 1 id:1009 Done
25114 Sep 22 23:21:39.931 DEBG IO Flush 1010 has deps [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)]
25115 Sep 22 23:21:39.931 INFO LiveRepair final flush submitted
25116 Sep 22 23:21:39.931 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25117 Sep 22 23:21:39.931 DEBG [0] 1010 Remove check skipped:{JobId(1001)} from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25118 Sep 22 23:21:39.931 DEBG [0] 1010 Remove check < min repaired:1002 from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25119 Sep 22 23:21:39.931 INFO [0] 1010 final dependency list [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25120 Sep 22 23:21:39.932 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25121 Sep 22 23:21:39.932 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25122 Sep 22 23:21:39.932 DEBG Flush :1010 extent_limit Some(1) deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25123 Sep 22 23:21:39.932 DEBG up_ds_listen was notified
25124 Sep 22 23:21:39.933 DEBG up_ds_listen process 1010
25125 Sep 22 23:21:39.933 DEBG [A] ack job 1010:11, : downstairs
25126 Sep 22 23:21:39.933 DEBG [rc] retire 1010 clears [JobId(1002), JobId(1003), JobId(1004), JobId(1005), JobId(1006), JobId(1007), JobId(1008), JobId(1009), JobId(1010)], : downstairs
25127 Sep 22 23:21:39.933 DEBG up_ds_listen checked 1 jobs, back to waiting
25128 Sep 22 23:21:39.933 INFO LiveRepair final flush completed
25129 Sep 22 23:21:39.933 INFO [0] f712941d-0577-43e4-8a2e-7814ec270c09 (512f09eb-7e91-421e-9fa9-f1bb0acbe6ae) LiveRepair Active Active ds_transition to Active
25130 Sep 22 23:21:39.933 INFO [0] Transition from LiveRepair to Active
25131 Sep 22 23:21:39.933 WARN Live Repair returns Ok(())
25132 Sep 22 23:21:39.935 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25133 Sep 22 23:21:39.952 INFO UUID: dfb16c90-27d7-4a45-bd56-019dbea3b0bb
25134 Sep 22 23:21:39.952 INFO Blocks per extent:512 Total Extents: 188
25135 Sep 22 23:21:39.952 INFO Crucible Version: Crucible Version: 0.0.1
25136 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25137 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25138 rustc: 1.70.0 stable x86_64-unknown-illumos
25139 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25140 Sep 22 23:21:39.952 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25141 Sep 22 23:21:39.952 INFO Using address: 127.0.0.1:65451, task: main
25142 Sep 22 23:21:39.952 INFO Repair listens on 127.0.0.1:0, task: repair
25143 Sep 22 23:21:39.953 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56310, task: repair
25144 Sep 22 23:21:39.953 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56310, task: repair
25145 Sep 22 23:21:39.953 INFO listening, local_addr: 127.0.0.1:56310, task: repair
25146 Sep 22 23:21:39.953 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56310, task: repair
25147 Sep 22 23:21:39.953 INFO Using repair address: 127.0.0.1:56310, task: main
25148 Sep 22 23:21:39.953 INFO No SSL acceptor configured, task: main
25149 Sep 22 23:21:39.990 WARN 52fe8d40-333d-4b64-8663-2adf0476947f request to replace downstairs 127.0.0.1:38958 with 127.0.0.1:62901
25150 Sep 22 23:21:39.990 INFO 52fe8d40-333d-4b64-8663-2adf0476947f found new target: 127.0.0.1:62901 at 0
25151 Waiting for replacement to finish
25152 Sep 22 23:21:39.995 INFO Checking if live repair is needed
25153 Sep 22 23:21:39.995 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) LiveRepairReady Active Active ds_transition to LiveRepair
25154 Sep 22 23:21:39.995 INFO [0] Transition from LiveRepairReady to LiveRepair
25155 Sep 22 23:21:39.995 INFO Live Repair started
25156 Sep 22 23:21:39.995 WARN Live Repair main task begins., task: repair
25157 Sep 22 23:21:39.995 INFO Start Live Repair of extents 0 to 2, task: repair
25158 Sep 22 23:21:39.995 INFO Start extent 0 repair, task: repair
25159 Sep 22 23:21:39.995 DEBG RE:0 Repair extent begins
25160 Sep 22 23:21:39.995 DEBG Create new job ids for 0, : downstairs
25161 Sep 22 23:21:39.995 INFO RE:0 repair extent with ids 1002,1003,1004,1005 deps:[]
25162 Sep 22 23:21:39.996 DEBG Enqueue repair job 1005, : downstairs
25163 Sep 22 23:21:39.996 DEBG Enqueue repair job 1002, : downstairs
25164 Sep 22 23:21:39.996 INFO RE:0 close id:1002 queued, notify DS
25165 Sep 22 23:21:39.996 INFO RE:0 Wait for result from close command 1002:3
25166 Sep 22 23:21:39.996 DEBG [0] 1002 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
25167 Sep 22 23:21:39.996 DEBG [0] 1002 Remove check < min repaired:1002 from deps:[], : downstairs
25168 Sep 22 23:21:39.996 INFO [0] 1002 final dependency list [], : downstairs
25169 Sep 22 23:21:39.996 DEBG [0] 1005 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
25170 Sep 22 23:21:39.996 DEBG [0] 1005 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
25171 Sep 22 23:21:39.996 INFO [0] 1005 final dependency list [JobId(1002), JobId(1003), JobId(1004)], : downstairs
25172 Sep 22 23:21:39.996 DEBG Flush just extent 0 with f:2 and g:1
25173 Sep 22 23:21:39.997 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
25174 Sep 22 23:21:39.997 DEBG Flush just extent 0 with f:2 and g:1
25175 Sep 22 23:21:39.998 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
25176 Sep 22 23:21:39.999 DEBG JustClose :1002 extent 0 deps:[] res:true
25177 Sep 22 23:21:39.999 DEBG [1] ELC got g:1 f:1 d:false
25178 Sep 22 23:21:39.999 DEBG [2] ELC got g:1 f:1 d:false
25179 Sep 22 23:21:39.999 DEBG [0] ELC got g:0 f:0 d:false
25180 Sep 22 23:21:39.999 DEBG [0] ExtentFlushClose 1002 AckReady, : downstairs
25181 Sep 22 23:21:39.999 DEBG up_ds_listen was notified
25182 Sep 22 23:21:39.999 DEBG up_ds_listen process 1002
25183 Sep 22 23:21:39.999 DEBG [A] ack job 1002:3, : downstairs
25184 Sep 22 23:21:39.999 DEBG up_ds_listen checked 1 jobs, back to waiting
25185 Sep 22 23:21:39.999 DEBG Extent 0 id:1002 Done
25186 Sep 22 23:21:39.999 DEBG Get repair info for 2 source, : downstairs
25187 Sep 22 23:21:39.999 DEBG Get repair info for 0 bad, : downstairs
25188 Sep 22 23:21:39.999 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
25189 Sep 22 23:21:39.999 DEBG Enqueue repair job 1003, : downstairs
25190 Sep 22 23:21:39.999 INFO RE:0 Wait for result from repair command 1003:4
25191 Sep 22 23:21:40.000 DEBG [0] 1003 Remove check skipped:{JobId(1001)} from deps:[JobId(1002)], : downstairs
25192 Sep 22 23:21:40.000 DEBG [0] 1003 Remove check < min repaired:1002 from deps:[JobId(1002)], : downstairs
25193 Sep 22 23:21:40.000 INFO [0] 1003 final dependency list [JobId(1002)], : downstairs
25194 Sep 22 23:21:40.000 INFO current number of open files limit 65536 is already the maximum
25195 Sep 22 23:21:40.000 DEBG Received NoOP 1003
25196 Sep 22 23:21:40.000 DEBG Received NoOP 1003
25197 Sep 22 23:21:40.000 INFO Created new region file "/tmp/downstairs-AuFNRJc7/region.json"
25198 Sep 22 23:21:40.000 DEBG Received ExtentLiveRepair 1003
25199 Sep 22 23:21:40.000 DEBG Work of: LiveNoOp 1003
25200 Sep 22 23:21:40.000 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
25201 Sep 22 23:21:40.000 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } waiting on 1 deps, role: work
25202 Sep 22 23:21:40.000 DEBG Work of: LiveNoOp 1003
25203 Sep 22 23:21:40.000 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
25204 Sep 22 23:21:40.000 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } waiting on 1 deps, role: work
25205 Sep 22 23:21:40.001 DEBG ExtentLiveRepair: extent 0 sra:127.0.0.1:53407
25206 Sep 22 23:21:40.001 INFO Created copy dir "/tmp/downstairs-kiLstbeI/00/000/000.copy"
25207 Sep 22 23:21:40.014 INFO Downstairs has completed Negotiation, task: proc
25208 Sep 22 23:21:40.019 INFO Downstairs has completed Negotiation, task: proc
25209 Sep 22 23:21:40.023 INFO Downstairs has completed Negotiation, task: proc
25210 Sep 22 23:21:40.024 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
25211 Sep 22 23:21:40.024 INFO [0] Transition from WaitActive to WaitQuorum
25212 Sep 22 23:21:40.024 WARN [0] new RM replaced this: None
25213 Sep 22 23:21:40.024 INFO [0] Starts reconcile loop
25214 Sep 22 23:21:40.024 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
25215 Sep 22 23:21:40.024 INFO [1] Transition from WaitActive to WaitQuorum
25216 Sep 22 23:21:40.024 WARN [1] new RM replaced this: None
25217 Sep 22 23:21:40.024 INFO [1] Starts reconcile loop
25218 Sep 22 23:21:40.025 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
25219 Sep 22 23:21:40.025 INFO [2] Transition from WaitActive to WaitQuorum
25220 Sep 22 23:21:40.025 WARN [2] new RM replaced this: None
25221 Sep 22 23:21:40.025 INFO [2] Starts reconcile loop
25222 Sep 22 23:21:40.025 INFO [0] 127.0.0.1:52165 task reports connection:true
25223 Sep 22 23:21:40.025 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitQuorum WaitQuorum WaitQuorum
25224 Sep 22 23:21:40.025 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25225 Sep 22 23:21:40.025 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25226 Sep 22 23:21:40.025 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25227 Sep 22 23:21:40.025 INFO [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25228 Sep 22 23:21:40.025 INFO [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25229 Sep 22 23:21:40.025 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25230 Sep 22 23:21:40.025 INFO [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25231 Sep 22 23:21:40.025 INFO [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25232 Sep 22 23:21:40.025 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25233 Sep 22 23:21:40.025 INFO Max found gen is 1
25234 Sep 22 23:21:40.025 INFO Generation requested: 1 >= found:1
25235 Sep 22 23:21:40.025 INFO Next flush: 1
25236 Sep 22 23:21:40.025 INFO All extents match
25237 Sep 22 23:21:40.025 INFO No downstairs repair required
25238 Sep 22 23:21:40.025 INFO No initial repair work was required
25239 Sep 22 23:21:40.025 INFO Set Downstairs and Upstairs active
25240 Sep 22 23:21:40.025 INFO 9144e02c-c312-47c4-9b1c-f03618834608 is now active with session: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f
25241 Sep 22 23:21:40.025 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Set Active after no repair
25242 Sep 22 23:21:40.025 INFO Notify all downstairs, region set compare is done.
25243 Sep 22 23:21:40.026 INFO Set check for repair
25244 Sep 22 23:21:40.026 INFO [1] 127.0.0.1:48339 task reports connection:true
25245 Sep 22 23:21:40.026 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Active Active Active
25246 Sep 22 23:21:40.026 INFO Set check for repair
25247 Sep 22 23:21:40.026 INFO [2] 127.0.0.1:33021 task reports connection:true
25248 Sep 22 23:21:40.026 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Active Active Active
25249 Sep 22 23:21:40.026 INFO Set check for repair
25250 Sep 22 23:21:40.026 INFO [0] received reconcile message
25251 Sep 22 23:21:40.026 INFO [0] All repairs completed, exit
25252 Sep 22 23:21:40.026 INFO [0] Starts cmd_loop
25253 Sep 22 23:21:40.026 INFO [1] received reconcile message
25254 Sep 22 23:21:40.026 INFO [1] All repairs completed, exit
25255 Sep 22 23:21:40.026 INFO [1] Starts cmd_loop
25256 Sep 22 23:21:40.026 INFO [2] received reconcile message
25257 Sep 22 23:21:40.026 INFO [2] All repairs completed, exit
25258 Sep 22 23:21:40.026 INFO [2] Starts cmd_loop
25259 The guest has finished waiting for activation
25260 Sep 22 23:21:40.102 INFO accepted connection, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25261 Sep 22 23:21:40.103 TRCE incoming request, uri: /extent/0/files, method: GET, req_id: fe714542-aed6-499e-b3c9-47b0afd62646, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25262 Sep 22 23:21:40.103 INFO request completed, latency_us: 348, response_code: 200, uri: /extent/0/files, method: GET, req_id: fe714542-aed6-499e-b3c9-47b0afd62646, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25263 Sep 22 23:21:40.103 INFO eid:0 Found repair files: ["000", "000.db"]
25264 Sep 22 23:21:40.104 TRCE incoming request, uri: /newextent/0/data, method: GET, req_id: 799cab3d-ab6f-4bed-a0be-5df21ab7e965, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25265 Sep 22 23:21:40.104 INFO request completed, latency_us: 554, response_code: 200, uri: /newextent/0/data, method: GET, req_id: 799cab3d-ab6f-4bed-a0be-5df21ab7e965, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25266 Sep 22 23:21:40.105 TRCE incoming request, uri: /newextent/0/db, method: GET, req_id: 7fb1c097-e8a3-45ec-908b-3041a969cd7c, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25267 Sep 22 23:21:40.106 INFO request completed, latency_us: 397, response_code: 200, uri: /newextent/0/db, method: GET, req_id: 7fb1c097-e8a3-45ec-908b-3041a969cd7c, remote_addr: 127.0.0.1:36805, local_addr: 127.0.0.1:53407, task: repair
25268 Sep 22 23:21:40.107 INFO Repair files downloaded, move directory "/tmp/downstairs-kiLstbeI/00/000/000.copy" to "/tmp/downstairs-kiLstbeI/00/000/000.replace"
25269 Sep 22 23:21:40.107 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25270 Sep 22 23:21:40.108 INFO Copy files from "/tmp/downstairs-kiLstbeI/00/000/000.replace" in "/tmp/downstairs-kiLstbeI/00/000"
25271 Sep 22 23:21:40.108 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000/000"
25272 Sep 22 23:21:40.108 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000/000.db"
25273 Sep 22 23:21:40.108 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25274 Sep 22 23:21:40.108 INFO Move directory "/tmp/downstairs-kiLstbeI/00/000/000.replace" to "/tmp/downstairs-kiLstbeI/00/000/000.completed"
25275 Sep 22 23:21:40.108 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25276 Sep 22 23:21:40.108 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25277 Sep 22 23:21:40.108 DEBG LiveRepair:1003 extent 0 deps:[JobId(1002)] res:true
25278 Sep 22 23:21:40.108 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } waiting on 1 deps, role: work
25279 Sep 22 23:21:40.108 DEBG [0] ExtentLiveRepair AckReady 1003, : downstairs
25280 Sep 22 23:21:40.109 DEBG up_ds_listen was notified
25281 Sep 22 23:21:40.109 DEBG up_ds_listen process 1003
25282 Sep 22 23:21:40.109 DEBG [A] ack job 1003:4, : downstairs
25283 Sep 22 23:21:40.109 DEBG up_ds_listen checked 1 jobs, back to waiting
25284 Sep 22 23:21:40.109 DEBG Extent 0 id:1003 Done
25285 Sep 22 23:21:40.109 DEBG Enqueue repair job 1004, : downstairs
25286 Sep 22 23:21:40.109 INFO RE:0 Wait for result from NoOp command 1004:5
25287 Sep 22 23:21:40.109 DEBG [0] 1004 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003)], : downstairs
25288 Sep 22 23:21:40.109 DEBG [0] 1004 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003)], : downstairs
25289 Sep 22 23:21:40.109 INFO [0] 1004 final dependency list [JobId(1002), JobId(1003)], : downstairs
25290 Sep 22 23:21:40.109 DEBG Received NoOP 1004
25291 Sep 22 23:21:40.109 DEBG Received NoOP 1004
25292 Sep 22 23:21:40.109 DEBG Received NoOP 1004
25293 Sep 22 23:21:40.109 DEBG Work of: LiveNoOp 1004
25294 Sep 22 23:21:40.109 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
25295 Sep 22 23:21:40.112 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
25296 Sep 22 23:21:40.112 DEBG Work of: LiveNoOp 1004
25297 Sep 22 23:21:40.112 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
25298 Sep 22 23:21:40.114 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
25299 Sep 22 23:21:40.114 DEBG Work of: LiveNoOp 1004
25300 Sep 22 23:21:40.114 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
25301 Sep 22 23:21:40.118 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
25302 Sep 22 23:21:40.118 DEBG [0] ExtentLiveNoOp AckReady 1004, : downstairs
25303 Sep 22 23:21:40.118 DEBG [0] ExtentLiveReopen AckReady 1005, : downstairs
25304 Sep 22 23:21:40.118 DEBG up_ds_listen was notified
25305 Sep 22 23:21:40.118 DEBG up_ds_listen process 1004
25306 Sep 22 23:21:40.118 DEBG [A] ack job 1004:5, : downstairs
25307 Sep 22 23:21:40.118 DEBG up_ds_listen process 1005
25308 Sep 22 23:21:40.118 DEBG [A] ack job 1005:6, : downstairs
25309 Sep 22 23:21:40.118 DEBG up_ds_listen checked 2 jobs, back to waiting
25310 Sep 22 23:21:40.118 DEBG up_ds_listen was notified
25311 Sep 22 23:21:40.118 DEBG up_ds_listen checked 0 jobs, back to waiting
25312 Sep 22 23:21:40.118 DEBG Extent 0 id:1004 Done
25313 Sep 22 23:21:40.118 INFO RE:0 Wait for result from reopen command 1005:6
25314 Sep 22 23:21:40.118 DEBG Extent 0 id:1005 Done
25315 Sep 22 23:21:40.119 INFO Start extent 1 repair, task: repair
25316 Sep 22 23:21:40.119 DEBG RE:1 Repair extent begins
25317 Sep 22 23:21:40.119 DEBG Create new job ids for 1, : downstairs
25318 Sep 22 23:21:40.119 INFO RE:1 repair extent with ids 1006,1007,1008,1009 deps:[]
25319 Sep 22 23:21:40.119 DEBG Enqueue repair job 1009, : downstairs
25320 Sep 22 23:21:40.119 DEBG Enqueue repair job 1006, : downstairs
25321 Sep 22 23:21:40.119 INFO RE:1 close id:1006 queued, notify DS
25322 Sep 22 23:21:40.119 INFO RE:1 Wait for result from close command 1006:7
25323 Sep 22 23:21:40.119 DEBG [0] 1006 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
25324 Sep 22 23:21:40.119 DEBG [0] 1006 Remove check < min repaired:1002 from deps:[], : downstairs
25325 Sep 22 23:21:40.119 INFO [0] 1006 final dependency list [], : downstairs
25326 Sep 22 23:21:40.119 DEBG [0] 1009 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
25327 Sep 22 23:21:40.119 DEBG [0] 1009 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
25328 Sep 22 23:21:40.119 INFO [0] 1009 final dependency list [JobId(1006), JobId(1007), JobId(1008)], : downstairs
25329 Sep 22 23:21:40.119 DEBG Flush just extent 1 with f:3 and g:1
25330 Sep 22 23:21:40.122 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
25331 Sep 22 23:21:40.122 DEBG Flush just extent 1 with f:3 and g:1
25332 Sep 22 23:21:40.123 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
25333 Sep 22 23:21:40.126 DEBG JustClose :1006 extent 1 deps:[] res:true
25334 Sep 22 23:21:40.126 DEBG [1] ELC got g:1 f:1 d:false
25335 Sep 22 23:21:40.126 DEBG [2] ELC got g:1 f:1 d:false
25336 Sep 22 23:21:40.126 DEBG [0] ELC got g:0 f:0 d:false
25337 Sep 22 23:21:40.126 DEBG [0] ExtentFlushClose 1006 AckReady, : downstairs
25338 Sep 22 23:21:40.126 DEBG up_ds_listen was notified
25339 Sep 22 23:21:40.126 DEBG up_ds_listen process 1006
25340 Sep 22 23:21:40.126 DEBG [A] ack job 1006:7, : downstairs
25341 Sep 22 23:21:40.127 DEBG up_ds_listen checked 1 jobs, back to waiting
25342 Sep 22 23:21:40.127 DEBG Extent 1 id:1006 Done
25343 Sep 22 23:21:40.127 DEBG Get repair info for 2 source, : downstairs
25344 Sep 22 23:21:40.127 DEBG Get repair info for 0 bad, : downstairs
25345 Sep 22 23:21:40.127 INFO Repair for extent 1 s:2 d:[ClientId(0)], : downstairs
25346 Sep 22 23:21:40.127 DEBG Enqueue repair job 1007, : downstairs
25347 Sep 22 23:21:40.127 INFO RE:1 Wait for result from repair command 1007:8
25348 Sep 22 23:21:40.127 DEBG [0] 1007 Remove check skipped:{JobId(1001)} from deps:[JobId(1006)], : downstairs
25349 Sep 22 23:21:40.127 DEBG [0] 1007 Remove check < min repaired:1002 from deps:[JobId(1006)], : downstairs
25350 Sep 22 23:21:40.127 INFO [0] 1007 final dependency list [JobId(1006)], : downstairs
25351 Sep 22 23:21:40.127 DEBG Received NoOP 1007
25352 Sep 22 23:21:40.127 DEBG Received NoOP 1007
25353 Sep 22 23:21:40.127 DEBG Received ExtentLiveRepair 1007
25354 Sep 22 23:21:40.127 DEBG Work of: LiveNoOp 1007
25355 Sep 22 23:21:40.127 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
25356 Sep 22 23:21:40.128 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } waiting on 1 deps, role: work
25357 Sep 22 23:21:40.128 DEBG Work of: LiveNoOp 1007
25358 Sep 22 23:21:40.128 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
25359 Sep 22 23:21:40.128 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } waiting on 1 deps, role: work
25360 Sep 22 23:21:40.128 DEBG ExtentLiveRepair: extent 1 sra:127.0.0.1:53407
25361 Sep 22 23:21:40.128 INFO Created copy dir "/tmp/downstairs-kiLstbeI/00/000/001.copy"
25362 Sep 22 23:21:40.214 INFO listening on 127.0.0.1:0, task: main
25363 Sep 22 23:21:40.214 INFO current number of open files limit 65536 is already the maximum
25364 Sep 22 23:21:40.214 INFO Opened existing region file "/tmp/downstairs-6BO8rCTH/region.json"
25365 Sep 22 23:21:40.214 INFO Database read version 1
25366 Sep 22 23:21:40.214 INFO Database write version 1
25367 Sep 22 23:21:40.223 INFO accepted connection, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25368 Sep 22 23:21:40.223 TRCE incoming request, uri: /extent/1/files, method: GET, req_id: 822d821d-6e99-4e6b-919c-f3c7025fc2ae, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25369 Sep 22 23:21:40.224 INFO request completed, latency_us: 259, response_code: 200, uri: /extent/1/files, method: GET, req_id: 822d821d-6e99-4e6b-919c-f3c7025fc2ae, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25370 Sep 22 23:21:40.224 INFO eid:1 Found repair files: ["001", "001.db"]
25371 Sep 22 23:21:40.224 TRCE incoming request, uri: /newextent/1/data, method: GET, req_id: 197e88fb-9c3b-4d88-917d-0477de97be12, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25372 Sep 22 23:21:40.225 INFO request completed, latency_us: 382, response_code: 200, uri: /newextent/1/data, method: GET, req_id: 197e88fb-9c3b-4d88-917d-0477de97be12, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25373 Sep 22 23:21:40.225 TRCE incoming request, uri: /newextent/1/db, method: GET, req_id: 1820bf47-7117-43da-b320-a55797afd8e6, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25374 Sep 22 23:21:40.226 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/1/db, method: GET, req_id: 1820bf47-7117-43da-b320-a55797afd8e6, remote_addr: 127.0.0.1:60945, local_addr: 127.0.0.1:53407, task: repair
25375 Sep 22 23:21:40.227 INFO Repair files downloaded, move directory "/tmp/downstairs-kiLstbeI/00/000/001.copy" to "/tmp/downstairs-kiLstbeI/00/000/001.replace"
25376 Sep 22 23:21:40.227 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25377 Sep 22 23:21:40.228 INFO Copy files from "/tmp/downstairs-kiLstbeI/00/000/001.replace" in "/tmp/downstairs-kiLstbeI/00/000"
25378 Sep 22 23:21:40.228 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000/001"
25379 Sep 22 23:21:40.228 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000/001.db"
25380 Sep 22 23:21:40.228 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25381 Sep 22 23:21:40.228 INFO Move directory "/tmp/downstairs-kiLstbeI/00/000/001.replace" to "/tmp/downstairs-kiLstbeI/00/000/001.completed"
25382 Sep 22 23:21:40.228 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25383 Sep 22 23:21:40.228 DEBG fsync completed for: "/tmp/downstairs-kiLstbeI/00/000"
25384 Sep 22 23:21:40.228 DEBG LiveRepair:1007 extent 1 deps:[JobId(1006)] res:true
25385 Sep 22 23:21:40.228 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: 52fe8d40-333d-4b64-8663-2adf0476947f, session_id: fc0a5f14-ae91-4f48-aded-c2a3fecef8a0, gen: 1 } waiting on 1 deps, role: work
25386 Sep 22 23:21:40.228 DEBG [0] ExtentLiveRepair AckReady 1007, : downstairs
25387 Sep 22 23:21:40.228 DEBG up_ds_listen was notified
25388 Sep 22 23:21:40.228 DEBG up_ds_listen process 1007
25389 Sep 22 23:21:40.228 DEBG [A] ack job 1007:8, : downstairs
25390 Sep 22 23:21:40.228 DEBG up_ds_listen checked 1 jobs, back to waiting
25391 Sep 22 23:21:40.228 DEBG Extent 1 id:1007 Done
25392 Sep 22 23:21:40.229 DEBG Enqueue repair job 1008, : downstairs
25393 Sep 22 23:21:40.229 INFO RE:1 Wait for result from NoOp command 1008:9
25394 Sep 22 23:21:40.229 DEBG [0] 1008 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007)], : downstairs
25395 Sep 22 23:21:40.229 DEBG [0] 1008 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007)], : downstairs
25396 Sep 22 23:21:40.229 INFO [0] 1008 final dependency list [JobId(1006), JobId(1007)], : downstairs
25397 Sep 22 23:21:40.229 DEBG Received NoOP 1008
25398 Sep 22 23:21:40.229 DEBG Received NoOP 1008
25399 Sep 22 23:21:40.229 DEBG Received NoOP 1008
25400 Sep 22 23:21:40.229 DEBG Work of: LiveNoOp 1008
25401 Sep 22 23:21:40.229 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25402 Sep 22 23:21:40.230 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25403 Sep 22 23:21:40.230 DEBG Work of: LiveNoOp 1008
25404 Sep 22 23:21:40.230 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25405 Sep 22 23:21:40.232 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25406 Sep 22 23:21:40.232 DEBG Work of: LiveNoOp 1008
25407 Sep 22 23:21:40.232 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25408 Sep 22 23:21:40.234 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25409 Sep 22 23:21:40.234 DEBG [0] ExtentLiveNoOp AckReady 1008, : downstairs
25410 Sep 22 23:21:40.234 DEBG [0] ExtentLiveReopen AckReady 1009, : downstairs
25411 Sep 22 23:21:40.234 DEBG up_ds_listen was notified
25412 Sep 22 23:21:40.234 DEBG up_ds_listen process 1008
25413 Sep 22 23:21:40.234 DEBG [A] ack job 1008:9, : downstairs
25414 Sep 22 23:21:40.234 DEBG up_ds_listen process 1009
25415 Sep 22 23:21:40.234 DEBG [A] ack job 1009:10, : downstairs
25416 Sep 22 23:21:40.234 DEBG up_ds_listen checked 2 jobs, back to waiting
25417 Sep 22 23:21:40.234 DEBG up_ds_listen was notified
25418 Sep 22 23:21:40.234 DEBG up_ds_listen checked 0 jobs, back to waiting
25419 Sep 22 23:21:40.234 DEBG Extent 1 id:1008 Done
25420 Sep 22 23:21:40.234 INFO RE:1 Wait for result from reopen command 1009:10
25421 Sep 22 23:21:40.234 DEBG Extent 1 id:1009 Done
25422 Sep 22 23:21:40.234 DEBG IO Flush 1010 has deps [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)]
25423 Sep 22 23:21:40.234 INFO LiveRepair final flush submitted
25424 Sep 22 23:21:40.234 DEBG [0] 1010 Remove check skipped:{JobId(1001)} from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25425 Sep 22 23:21:40.234 DEBG [0] 1010 Remove check < min repaired:1002 from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25426 Sep 22 23:21:40.234 INFO [0] 1010 final dependency list [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25427 Sep 22 23:21:40.235 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25428 Sep 22 23:21:40.235 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25429 Sep 22 23:21:40.235 DEBG Flush :1010 extent_limit Some(1) deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25430 Sep 22 23:21:40.235 DEBG up_ds_listen was notified
25431 Sep 22 23:21:40.235 DEBG up_ds_listen process 1010
25432 Sep 22 23:21:40.235 DEBG [A] ack job 1010:11, : downstairs
25433 Sep 22 23:21:40.235 DEBG [rc] retire 1010 clears [JobId(1002), JobId(1003), JobId(1004), JobId(1005), JobId(1006), JobId(1007), JobId(1008), JobId(1009), JobId(1010)], : downstairs
25434 Sep 22 23:21:40.235 DEBG up_ds_listen checked 1 jobs, back to waiting
25435 Sep 22 23:21:40.235 INFO LiveRepair final flush completed
25436 Sep 22 23:21:40.235 INFO [0] 52fe8d40-333d-4b64-8663-2adf0476947f (fc0a5f14-ae91-4f48-aded-c2a3fecef8a0) LiveRepair Active Active ds_transition to Active
25437 Sep 22 23:21:40.235 INFO [0] Transition from LiveRepair to Active
25438 Sep 22 23:21:40.235 WARN Live Repair returns Ok(())
25439 Sep 22 23:21:40.240 INFO UUID: 3184f538-c5cb-4487-be43-e6cb9becc716
25440 Sep 22 23:21:40.240 INFO Blocks per extent:512 Total Extents: 188
25441 Sep 22 23:21:40.240 INFO Crucible Version: Crucible Version: 0.0.1
25442 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25443 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25444 rustc: 1.70.0 stable x86_64-unknown-illumos
25445 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25446 Sep 22 23:21:40.240 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25447 Sep 22 23:21:40.240 INFO Using address: 127.0.0.1:46698, task: main
25448 Sep 22 23:21:40.241 INFO Repair listens on 127.0.0.1:0, task: repair
25449 Sep 22 23:21:40.241 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38546, task: repair
25450 Sep 22 23:21:40.241 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38546, task: repair
25451 Sep 22 23:21:40.241 INFO listening, local_addr: 127.0.0.1:38546, task: repair
25452 Sep 22 23:21:40.241 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38546, task: repair
25453 Sep 22 23:21:40.241 INFO Using repair address: 127.0.0.1:38546, task: main
25454 Sep 22 23:21:40.241 INFO No SSL acceptor configured, task: main
25455 Sep 22 23:21:40.249 INFO UUID: b63118d3-af4f-496f-b02c-9f9b92d23429
25456 Sep 22 23:21:40.249 INFO Blocks per extent:512 Total Extents: 188
25457 Sep 22 23:21:40.249 INFO Crucible Version: Crucible Version: 0.0.1
25458 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25459 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25460 rustc: 1.70.0 stable x86_64-unknown-illumos
25461 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25462 Sep 22 23:21:40.249 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25463 Sep 22 23:21:40.249 INFO Using address: 127.0.0.1:60546, task: main
25464 Sep 22 23:21:40.249 INFO Repair listens on 127.0.0.1:0, task: repair
25465 Sep 22 23:21:40.249 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63823, task: repair
25466 Sep 22 23:21:40.249 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63823, task: repair
25467 Sep 22 23:21:40.249 INFO listening, local_addr: 127.0.0.1:63823, task: repair
25468 Sep 22 23:21:40.250 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63823, task: repair
25469 Sep 22 23:21:40.250 INFO Using repair address: 127.0.0.1:63823, task: main
25470 Sep 22 23:21:40.250 INFO No SSL acceptor configured, task: main
25471 note: configured to log to "/dev/stdout"
25472 note: configured to log to "/dev/stdout"
254732023-09-22T23:21:40.296ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:46176
254742023-09-22T23:21:40.296ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:47828
254752023-09-22T23:21:40.296ZINFOcrucible-pantry: listen IP: 127.0.0.1:46176
254762023-09-22T23:21:40.296ZINFOcrucible-pantry: listen IP: 127.0.0.1:47828
25477 Sep 22 23:21:40.297 INFO listening on 127.0.0.1:0, task: main
25478 Sep 22 23:21:40.297 INFO current number of open files limit 65536 is already the maximum
25479 Sep 22 23:21:40.297 INFO Opened existing region file "/tmp/downstairs-AuFNRJc7/region.json"
25480 Sep 22 23:21:40.297 INFO Database read version 1
25481 Sep 22 23:21:40.297 INFO Database write version 1
25482 Sep 22 23:21:40.343 INFO UUID: e1ba5200-2113-47b5-8eab-dc553c509bb6
25483 Sep 22 23:21:40.343 INFO Blocks per extent:512 Total Extents: 188
25484 Sep 22 23:21:40.343 INFO Crucible Version: Crucible Version: 0.0.1
25485 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25486 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25487 rustc: 1.70.0 stable x86_64-unknown-illumos
25488 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25489 Sep 22 23:21:40.343 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25490 Sep 22 23:21:40.343 INFO Using address: 127.0.0.1:53040, task: main
25491 Sep 22 23:21:40.344 INFO Repair listens on 127.0.0.1:0, task: repair
25492 Sep 22 23:21:40.344 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65165, task: repair
25493 Sep 22 23:21:40.344 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65165, task: repair
25494 Sep 22 23:21:40.344 INFO listening, local_addr: 127.0.0.1:65165, task: repair
25495 Sep 22 23:21:40.344 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65165, task: repair
25496 Sep 22 23:21:40.344 INFO Using repair address: 127.0.0.1:65165, task: main
25497 Sep 22 23:21:40.344 INFO No SSL acceptor configured, task: main
25498 note: configured to log to "/dev/stdout"
254992023-09-22T23:21:40.364ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:47396
255002023-09-22T23:21:40.364ZINFOcrucible-pantry: listen IP: 127.0.0.1:47396
25501 Sep 22 23:21:40.385 INFO UUID: 121dfa8b-dae0-4803-ad33-1b8e5b36d846
25502 Sep 22 23:21:40.385 INFO Blocks per extent:512 Total Extents: 188
25503 Sep 22 23:21:40.385 INFO Crucible Version: Crucible Version: 0.0.1
25504 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25505 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25506 rustc: 1.70.0 stable x86_64-unknown-illumos
25507 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25508 Sep 22 23:21:40.385 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25509 Sep 22 23:21:40.385 INFO Using address: 127.0.0.1:60454, task: main
25510 Sep 22 23:21:40.385 INFO Repair listens on 127.0.0.1:0, task: repair
25511 Sep 22 23:21:40.386 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35500, task: repair
25512 Sep 22 23:21:40.386 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35500, task: repair
25513 Sep 22 23:21:40.386 INFO listening, local_addr: 127.0.0.1:35500, task: repair
25514 Sep 22 23:21:40.386 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35500, task: repair
25515 Sep 22 23:21:40.386 INFO Using repair address: 127.0.0.1:35500, task: main
25516 Sep 22 23:21:40.386 INFO No SSL acceptor configured, task: main
25517 note: configured to log to "/dev/stdout"
255182023-09-22T23:21:40.399ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:52393
255192023-09-22T23:21:40.400ZINFOcrucible-pantry: listen IP: 127.0.0.1:52393
25520 Sep 22 23:21:40.499 INFO listening on 127.0.0.1:0, task: main
255212023-09-22T23:21:40.500ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:47828 remote_addr = 127.0.0.1:35008
255222023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): no entry exists for volume 07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec, constructing...
255232023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): Upstairs starts
255242023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
255252023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
255262023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 6d97a177-f79d-4abd-9fa5-a1779824805b
255272023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): Crucible 6d97a177-f79d-4abd-9fa5-a1779824805b has session id: b26982c0-3685-4f78-910a-02e75227794c
255282023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:33519 looper = 0
255292023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:52265 looper = 1
255302023-09-22T23:21:40.501ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:46698 looper = 2
255312023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
255322023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
255332023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
255342023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): volume 07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec constructed ok
25535 The guest has requested activation
255362023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b active request set
255372023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): [0] 6d97a177-f79d-4abd-9fa5-a1779824805b looper connected looper = 0
255382023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:33519 in state New
255392023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): [1] 6d97a177-f79d-4abd-9fa5-a1779824805b looper connected looper = 1
255402023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:52265 in state New
255412023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): [2] 6d97a177-f79d-4abd-9fa5-a1779824805b looper connected looper = 2
255422023-09-22T23:21:40.502ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:46698 in state New
25543 Sep 22 23:21:40.502 INFO accepted connection from 127.0.0.1:50318, task: main
25544 Sep 22 23:21:40.503 INFO accepted connection from 127.0.0.1:34759, task: main
25545 Sep 22 23:21:40.503 INFO accepted connection from 127.0.0.1:56062, task: main
25546 Sep 22 23:21:40.503 INFO Connection request from 6d97a177-f79d-4abd-9fa5-a1779824805b with version 4, task: proc
25547 Sep 22 23:21:40.503 INFO upstairs UpstairsConnection { upstairs_id: 6d97a177-f79d-4abd-9fa5-a1779824805b, session_id: ae25a180-0753-4d68-9404-5e9bb9308f43, gen: 1 } connected, version 4, task: proc
25548 Sep 22 23:21:40.503 INFO Connection request from 6d97a177-f79d-4abd-9fa5-a1779824805b with version 4, task: proc
25549 Sep 22 23:21:40.503 INFO upstairs UpstairsConnection { upstairs_id: 6d97a177-f79d-4abd-9fa5-a1779824805b, session_id: ae25a180-0753-4d68-9404-5e9bb9308f43, gen: 1 } connected, version 4, task: proc
25550 Sep 22 23:21:40.503 INFO Connection request from 6d97a177-f79d-4abd-9fa5-a1779824805b with version 4, task: proc
25551 Sep 22 23:21:40.503 INFO upstairs UpstairsConnection { upstairs_id: 6d97a177-f79d-4abd-9fa5-a1779824805b, session_id: ae25a180-0753-4d68-9404-5e9bb9308f43, gen: 1 } connected, version 4, task: proc
255522023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [0] 6d97a177-f79d-4abd-9fa5-a1779824805b (ae25a180-0753-4d68-9404-5e9bb9308f43) New New New ds_transition to WaitActive
255532023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
255542023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session ae25a180-0753-4d68-9404-5e9bb9308f43
255552023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [1] 6d97a177-f79d-4abd-9fa5-a1779824805b (ae25a180-0753-4d68-9404-5e9bb9308f43) WaitActive New New ds_transition to WaitActive
255562023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
255572023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session ae25a180-0753-4d68-9404-5e9bb9308f43
255582023-09-22T23:21:40.503ZINFOcrucible-pantry (datafile): [2] 6d97a177-f79d-4abd-9fa5-a1779824805b (ae25a180-0753-4d68-9404-5e9bb9308f43) WaitActive WaitActive New ds_transition to WaitActive
255592023-09-22T23:21:40.504ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
255602023-09-22T23:21:40.504ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session ae25a180-0753-4d68-9404-5e9bb9308f43
25561 Sep 22 23:21:40.504 INFO UpstairsConnection { upstairs_id: 6d97a177-f79d-4abd-9fa5-a1779824805b, session_id: ae25a180-0753-4d68-9404-5e9bb9308f43, gen: 1 } is now active (read-write)
25562 Sep 22 23:21:40.504 INFO UpstairsConnection { upstairs_id: 6d97a177-f79d-4abd-9fa5-a1779824805b, session_id: ae25a180-0753-4d68-9404-5e9bb9308f43, gen: 1 } is now active (read-write)
25563 Sep 22 23:21:40.504 INFO UpstairsConnection { upstairs_id: 6d97a177-f79d-4abd-9fa5-a1779824805b, session_id: ae25a180-0753-4d68-9404-5e9bb9308f43, gen: 1 } is now active (read-write)
255642023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:33519 has UUID 6e2bf597-efa8-46d7-84b3-855cd0183cc5
255652023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 6e2bf597-efa8-46d7-84b3-855cd0183cc5, encrypted: true, database_read_version: 1, database_write_version: 1 }
255662023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b WaitActive WaitActive WaitActive
255672023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:52265 has UUID 13d4e6fc-362c-415f-bc7d-909deb752536
255682023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 13d4e6fc-362c-415f-bc7d-909deb752536, encrypted: true, database_read_version: 1, database_write_version: 1 }
255692023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b WaitActive WaitActive WaitActive
255702023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:46698 has UUID 3184f538-c5cb-4487-be43-e6cb9becc716
255712023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 3184f538-c5cb-4487-be43-e6cb9becc716, encrypted: true, database_read_version: 1, database_write_version: 1 }
255722023-09-22T23:21:40.505ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b WaitActive WaitActive WaitActive
25573 Sep 22 23:21:40.515 INFO listening on 127.0.0.1:0, task: main
255742023-09-22T23:21:40.515ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:46176 remote_addr = 127.0.0.1:35475
255752023-09-22T23:21:40.516ZINFOcrucible-pantry (datafile): no entry exists for volume ed764ae9-e3ce-441f-8278-67469b5636e4, constructing...
255762023-09-22T23:21:40.516ZINFOcrucible-pantry (datafile): Upstairs starts
255772023-09-22T23:21:40.516ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
255782023-09-22T23:21:40.516ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
255792023-09-22T23:21:40.516ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 585127d2-6fce-4638-823a-9e437ec79e5b
255802023-09-22T23:21:40.516ZINFOcrucible-pantry (datafile): Crucible 585127d2-6fce-4638-823a-9e437ec79e5b has session id: fe8dcfff-e2c4-48d5-bda2-4cb07880027f
255812023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:37108 looper = 0
255822023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:35718 looper = 1
255832023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:60546 looper = 2
255842023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
255852023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
255862023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
255872023-09-22T23:21:40.517ZINFOcrucible-pantry (datafile): volume ed764ae9-e3ce-441f-8278-67469b5636e4 constructed ok
25588 The guest has requested activation
255892023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b active request set
25590 Sep 22 23:21:40.518 INFO accepted connection from 127.0.0.1:43703, task: main
255912023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b looper connected looper = 0
255922023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:37108 in state New
255932023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b looper connected looper = 1
255942023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:35718 in state New
255952023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b looper connected looper = 2
255962023-09-22T23:21:40.518ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:60546 in state New
25597 Sep 22 23:21:40.518 INFO accepted connection from 127.0.0.1:60507, task: main
25598 Sep 22 23:21:40.518 INFO accepted connection from 127.0.0.1:49103, task: main
25599 Sep 22 23:21:40.518 INFO Connection request from 585127d2-6fce-4638-823a-9e437ec79e5b with version 4, task: proc
25600 Sep 22 23:21:40.518 INFO upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } connected, version 4, task: proc
25601 Sep 22 23:21:40.518 INFO Connection request from 585127d2-6fce-4638-823a-9e437ec79e5b with version 4, task: proc
25602 Sep 22 23:21:40.519 INFO upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } connected, version 4, task: proc
25603 Sep 22 23:21:40.519 INFO Connection request from 585127d2-6fce-4638-823a-9e437ec79e5b with version 4, task: proc
25604 Sep 22 23:21:40.519 INFO upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } connected, version 4, task: proc
256052023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) New New New ds_transition to WaitActive
256062023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
256072023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 3a227d09-1b16-48c0-8c37-0e4189fb3e80
256082023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) WaitActive New New ds_transition to WaitActive
256092023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
256102023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 3a227d09-1b16-48c0-8c37-0e4189fb3e80
256112023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) WaitActive WaitActive New ds_transition to WaitActive
256122023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
256132023-09-22T23:21:40.519ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 3a227d09-1b16-48c0-8c37-0e4189fb3e80
25614 Sep 22 23:21:40.520 INFO UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } is now active (read-write)
25615 Sep 22 23:21:40.520 INFO UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } is now active (read-write)
25616 Sep 22 23:21:40.520 INFO UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } is now active (read-write)
256172023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:37108 has UUID aa05d4f9-364f-4612-a579-613b38785c79
256182023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: aa05d4f9-364f-4612-a579-613b38785c79, encrypted: true, database_read_version: 1, database_write_version: 1 }
256192023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b WaitActive WaitActive WaitActive
256202023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:35718 has UUID 9c7d4f0a-db95-4538-bb7e-7b1ed1936555
256212023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 9c7d4f0a-db95-4538-bb7e-7b1ed1936555, encrypted: true, database_read_version: 1, database_write_version: 1 }
256222023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b WaitActive WaitActive WaitActive
256232023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:60546 has UUID b63118d3-af4f-496f-b02c-9f9b92d23429
256242023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b63118d3-af4f-496f-b02c-9f9b92d23429, encrypted: true, database_read_version: 1, database_write_version: 1 }
256252023-09-22T23:21:40.521ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b WaitActive WaitActive WaitActive
25626 Sep 22 23:21:40.521 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25627 Sep 22 23:21:40.524 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25628 Sep 22 23:21:40.526 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25629 Sep 22 23:21:40.539 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25630 Sep 22 23:21:40.542 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25631 Sep 22 23:21:40.545 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25632 Sep 22 23:21:40.550 INFO Downstairs has completed Negotiation, task: proc
25633 Sep 22 23:21:40.551 INFO Downstairs has completed Negotiation, task: proc
25634 Sep 22 23:21:40.551 INFO Downstairs has completed Negotiation, task: proc
256352023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [0] 6d97a177-f79d-4abd-9fa5-a1779824805b (ae25a180-0753-4d68-9404-5e9bb9308f43) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
256362023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
256372023-09-22T23:21:40.552ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
256382023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
256392023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [1] 6d97a177-f79d-4abd-9fa5-a1779824805b (ae25a180-0753-4d68-9404-5e9bb9308f43) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
256402023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
256412023-09-22T23:21:40.552ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
256422023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
256432023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [2] 6d97a177-f79d-4abd-9fa5-a1779824805b (ae25a180-0753-4d68-9404-5e9bb9308f43) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
256442023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
256452023-09-22T23:21:40.552ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
256462023-09-22T23:21:40.552ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
256472023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:33519 task reports connection:true
256482023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b WaitQuorum WaitQuorum WaitQuorum
256492023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
256502023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
256512023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25652 The guest has finished waiting for activation
256532023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
256542023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
256552023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
256562023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
256572023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
256582023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
256592023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Max found gen is 1
256602023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
256612023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Next flush: 1
256622023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): All extents match
256632023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): No downstairs repair required
256642023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): No initial repair work was required
256652023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
256662023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b is now active with session: ae25a180-0753-4d68-9404-5e9bb9308f43
256672023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b Set Active after no repair
256682023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
256692023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Set check for repair
256702023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:52265 task reports connection:true
256712023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b Active Active Active
256722023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): Set check for repair
256732023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:46698 task reports connection:true
256742023-09-22T23:21:40.553ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b Active Active Active
256752023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): Set check for repair
256762023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [0] received reconcile message
256772023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
256782023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
256792023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [1] received reconcile message
256802023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
256812023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
256822023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [2] received reconcile message
256832023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
256842023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
256852023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): volume 07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec activated ok
256862023-09-22T23:21:40.554ZINFOcrucible-pantry (datafile): volume 07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec constructed and inserted ok
256872023-09-22T23:21:40.554ZINFOcrucible-pantry (dropshot): request completed latency_us = 52941 local_addr = 127.0.0.1:47828 method = POST remote_addr = 127.0.0.1:35008 req_id = 0744ae93-fe5e-48b4-8805-16081d978cf1 response_code = 200 uri = /crucible/pantry/0/volume/07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec
25688 Sep 22 23:21:40.579 INFO Downstairs has completed Negotiation, task: proc
25689 Sep 22 23:21:40.580 INFO Downstairs has completed Negotiation, task: proc
25690 Sep 22 23:21:40.582 INFO Downstairs has completed Negotiation, task: proc
256912023-09-22T23:21:40.583ZINFOcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
256922023-09-22T23:21:40.583ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
256932023-09-22T23:21:40.583ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
256942023-09-22T23:21:40.583ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
256952023-09-22T23:21:40.583ZINFOcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
256962023-09-22T23:21:40.583ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
256972023-09-22T23:21:40.583ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
256982023-09-22T23:21:40.583ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
256992023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
257002023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
257012023-09-22T23:21:40.584ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
257022023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
257032023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:37108 task reports connection:true
257042023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b WaitQuorum WaitQuorum WaitQuorum
257052023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257062023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257072023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257082023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257092023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25710 The guest has finished waiting for activation
257112023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257122023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257132023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257142023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257152023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): Max found gen is 1
257162023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
257172023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): Next flush: 1
257182023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): All extents match
257192023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): No downstairs repair required
257202023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): No initial repair work was required
257212023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
257222023-09-22T23:21:40.584ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b is now active with session: 3a227d09-1b16-48c0-8c37-0e4189fb3e80
257232023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b Set Active after no repair
257242023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
257252023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): Set check for repair
257262023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:35718 task reports connection:true
257272023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b Active Active Active
257282023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): Set check for repair
257292023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60546 task reports connection:true
257302023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b Active Active Active
257312023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): Set check for repair
257322023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [0] received reconcile message
257332023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
257342023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
257352023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [1] received reconcile message
257362023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
257372023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
257382023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [2] received reconcile message
257392023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
257402023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
257412023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): volume ed764ae9-e3ce-441f-8278-67469b5636e4 activated ok
257422023-09-22T23:21:40.585ZINFOcrucible-pantry (datafile): volume ed764ae9-e3ce-441f-8278-67469b5636e4 constructed and inserted ok
257432023-09-22T23:21:40.585ZINFOcrucible-pantry (dropshot): request completed latency_us = 68837 local_addr = 127.0.0.1:46176 method = POST remote_addr = 127.0.0.1:35475 req_id = c8f070d8-292e-4ce5-a796-8dc6973edfe1 response_code = 200 uri = /crucible/pantry/0/volume/ed764ae9-e3ce-441f-8278-67469b5636e4
25744 Sep 22 23:21:40.587 INFO listening on 127.0.0.1:0, task: main
257452023-09-22T23:21:40.588ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:47396 remote_addr = 127.0.0.1:39199
257462023-09-22T23:21:40.588ZINFOcrucible-pantry (datafile): no entry exists for volume 1eb9cf2b-fac5-48fd-86a9-e21629057f3e, constructing...
257472023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): Upstairs starts
257482023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
257492023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
257502023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 40e06af7-e69f-43e2-974b-bb00dc5e8960
257512023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): Crucible 40e06af7-e69f-43e2-974b-bb00dc5e8960 has session id: fa831e57-f170-4210-8b5e-b43db9390e2c
257522023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:55664 looper = 0
257532023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:37034 looper = 1
25754 The guest has requested activation
257552023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:53040 looper = 2
257562023-09-22T23:21:40.589ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
25757 Sep 22 23:21:40.590 INFO accepted connection from 127.0.0.1:49946, task: main
257582023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
25759 Sep 22 23:21:40.590 INFO accepted connection from 127.0.0.1:45897, task: main
25760 Sep 22 23:21:40.590 INFO accepted connection from 127.0.0.1:36645, task: main
257612023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
257622023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): volume 1eb9cf2b-fac5-48fd-86a9-e21629057f3e constructed ok
25763 Sep 22 23:21:40.590 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
25764 Sep 22 23:21:40.590 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } connected, version 4, task: proc
257652023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 active request set
25766 Sep 22 23:21:40.590 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
25767 Sep 22 23:21:40.590 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } connected, version 4, task: proc
257682023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected looper = 0
25769 Sep 22 23:21:40.590 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
25770 Sep 22 23:21:40.590 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } connected, version 4, task: proc
257712023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:55664 in state New
257722023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected looper = 1
257732023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:37034 in state New
257742023-09-22T23:21:40.590ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected looper = 2
257752023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:53040 in state New
257762023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) New New New ds_transition to WaitActive
257772023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
25778 Sep 22 23:21:40.591 INFO UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } is now active (read-write)
257792023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session d90ddc5f-b441-432b-bb73-5ab29b73ba5b
25780 Sep 22 23:21:40.591 INFO UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } is now active (read-write)
257812023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitActive New New ds_transition to WaitActive
257822023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
25783 Sep 22 23:21:40.591 INFO UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } is now active (read-write)
257842023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session d90ddc5f-b441-432b-bb73-5ab29b73ba5b
257852023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitActive WaitActive New ds_transition to WaitActive
257862023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
257872023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session d90ddc5f-b441-432b-bb73-5ab29b73ba5b
257882023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:55664 has UUID b740b481-b810-4686-a6ad-5e7a129e9669
257892023-09-22T23:21:40.591ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b740b481-b810-4686-a6ad-5e7a129e9669, encrypted: true, database_read_version: 1, database_write_version: 1 }
257902023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitActive WaitActive WaitActive
257912023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:37034 has UUID efa5c554-210a-4196-815d-fa3d1dfbb858
257922023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: efa5c554-210a-4196-815d-fa3d1dfbb858, encrypted: true, database_read_version: 1, database_write_version: 1 }
257932023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitActive WaitActive WaitActive
257942023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:53040 has UUID e1ba5200-2113-47b5-8eab-dc553c509bb6
257952023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: e1ba5200-2113-47b5-8eab-dc553c509bb6, encrypted: true, database_read_version: 1, database_write_version: 1 }
257962023-09-22T23:21:40.592ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitActive WaitActive WaitActive
25797 Sep 22 23:21:40.603 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25798 Sep 22 23:21:40.605 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25799 Sep 22 23:21:40.607 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25800 Sep 22 23:21:40.619 INFO listening on 127.0.0.1:0, task: main
258012023-09-22T23:21:40.619ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:52393 remote_addr = 127.0.0.1:57584
258022023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): no entry exists for volume 3251378f-ea13-40c9-9d37-7f38d07fbf35, constructing...
258032023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): Upstairs starts
258042023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
258052023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
258062023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: cf566b9e-aff0-4665-9031-98e179d159a4
258072023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): Crucible cf566b9e-aff0-4665-9031-98e179d159a4 has session id: f610c67f-9d19-426f-9069-eec75dc91553
258082023-09-22T23:21:40.620ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:40057 looper = 0
258092023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:65451 looper = 1
258102023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:60454 looper = 2
258112023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
258122023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
258132023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
258142023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): volume 3251378f-ea13-40c9-9d37-7f38d07fbf35 constructed ok
25815 The guest has requested activation
258162023-09-22T23:21:40.621ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 active request set
258172023-09-22T23:21:40.622ZINFOcrucible-pantry (datafile): [0] cf566b9e-aff0-4665-9031-98e179d159a4 looper connected looper = 0
258182023-09-22T23:21:40.622ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:40057 in state New
258192023-09-22T23:21:40.622ZINFOcrucible-pantry (datafile): [1] cf566b9e-aff0-4665-9031-98e179d159a4 looper connected looper = 1
258202023-09-22T23:21:40.622ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:65451 in state New
258212023-09-22T23:21:40.622ZINFOcrucible-pantry (datafile): [2] cf566b9e-aff0-4665-9031-98e179d159a4 looper connected looper = 2
258222023-09-22T23:21:40.622ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:60454 in state New
25823 Sep 22 23:21:40.622 INFO accepted connection from 127.0.0.1:42340, task: main
25824 Sep 22 23:21:40.622 INFO accepted connection from 127.0.0.1:42583, task: main
25825 Sep 22 23:21:40.622 INFO accepted connection from 127.0.0.1:40516, task: main
25826 Sep 22 23:21:40.622 INFO Connection request from cf566b9e-aff0-4665-9031-98e179d159a4 with version 4, task: proc
25827 Sep 22 23:21:40.622 INFO upstairs UpstairsConnection { upstairs_id: cf566b9e-aff0-4665-9031-98e179d159a4, session_id: e7612fa3-27f9-4d09-a4f3-b6224148140c, gen: 1 } connected, version 4, task: proc
25828 Sep 22 23:21:40.622 INFO Connection request from cf566b9e-aff0-4665-9031-98e179d159a4 with version 4, task: proc
25829 Sep 22 23:21:40.622 INFO upstairs UpstairsConnection { upstairs_id: cf566b9e-aff0-4665-9031-98e179d159a4, session_id: e7612fa3-27f9-4d09-a4f3-b6224148140c, gen: 1 } connected, version 4, task: proc
25830 Sep 22 23:21:40.623 INFO Connection request from cf566b9e-aff0-4665-9031-98e179d159a4 with version 4, task: proc
25831 Sep 22 23:21:40.623 INFO upstairs UpstairsConnection { upstairs_id: cf566b9e-aff0-4665-9031-98e179d159a4, session_id: e7612fa3-27f9-4d09-a4f3-b6224148140c, gen: 1 } connected, version 4, task: proc
258322023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [0] cf566b9e-aff0-4665-9031-98e179d159a4 (e7612fa3-27f9-4d09-a4f3-b6224148140c) New New New ds_transition to WaitActive
258332023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
258342023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session e7612fa3-27f9-4d09-a4f3-b6224148140c
258352023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [1] cf566b9e-aff0-4665-9031-98e179d159a4 (e7612fa3-27f9-4d09-a4f3-b6224148140c) WaitActive New New ds_transition to WaitActive
258362023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
258372023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session e7612fa3-27f9-4d09-a4f3-b6224148140c
258382023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [2] cf566b9e-aff0-4665-9031-98e179d159a4 (e7612fa3-27f9-4d09-a4f3-b6224148140c) WaitActive WaitActive New ds_transition to WaitActive
258392023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
258402023-09-22T23:21:40.623ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session e7612fa3-27f9-4d09-a4f3-b6224148140c
25841 Sep 22 23:21:40.624 INFO UpstairsConnection { upstairs_id: cf566b9e-aff0-4665-9031-98e179d159a4, session_id: e7612fa3-27f9-4d09-a4f3-b6224148140c, gen: 1 } is now active (read-write)
25842 Sep 22 23:21:40.624 INFO UpstairsConnection { upstairs_id: cf566b9e-aff0-4665-9031-98e179d159a4, session_id: e7612fa3-27f9-4d09-a4f3-b6224148140c, gen: 1 } is now active (read-write)
25843 Sep 22 23:21:40.624 INFO UpstairsConnection { upstairs_id: cf566b9e-aff0-4665-9031-98e179d159a4, session_id: e7612fa3-27f9-4d09-a4f3-b6224148140c, gen: 1 } is now active (read-write)
258442023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:40057 has UUID 1f0dc9eb-57e9-42b1-a5c8-0ed1810eb444
258452023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 1f0dc9eb-57e9-42b1-a5c8-0ed1810eb444, encrypted: true, database_read_version: 1, database_write_version: 1 }
258462023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 WaitActive WaitActive WaitActive
258472023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:65451 has UUID dfb16c90-27d7-4a45-bd56-019dbea3b0bb
258482023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: dfb16c90-27d7-4a45-bd56-019dbea3b0bb, encrypted: true, database_read_version: 1, database_write_version: 1 }
258492023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 WaitActive WaitActive WaitActive
258502023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:60454 has UUID 121dfa8b-dae0-4803-ad33-1b8e5b36d846
258512023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 121dfa8b-dae0-4803-ad33-1b8e5b36d846, encrypted: true, database_read_version: 1, database_write_version: 1 }
258522023-09-22T23:21:40.625ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 WaitActive WaitActive WaitActive
25853 Sep 22 23:21:40.625 INFO Downstairs has completed Negotiation, task: proc
25854 Sep 22 23:21:40.626 INFO Downstairs has completed Negotiation, task: proc
25855 Sep 22 23:21:40.627 INFO Downstairs has completed Negotiation, task: proc
258562023-09-22T23:21:40.627ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
258572023-09-22T23:21:40.627ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
258582023-09-22T23:21:40.628ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
258592023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
258602023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
258612023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
258622023-09-22T23:21:40.628ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
258632023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
258642023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
258652023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
258662023-09-22T23:21:40.628ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
258672023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
258682023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:55664 task reports connection:true
258692023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitQuorum WaitQuorum WaitQuorum
258702023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
258712023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25872 The guest has finished waiting for activation
258732023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
258742023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
258752023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
258762023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
258772023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
258782023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
258792023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
258802023-09-22T23:21:40.628ZINFOcrucible-pantry (datafile): Max found gen is 1
258812023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
258822023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Next flush: 1
258832023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): All extents match
258842023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): No downstairs repair required
258852023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): No initial repair work was required
258862023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
258872023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 is now active with session: d90ddc5f-b441-432b-bb73-5ab29b73ba5b
258882023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 Set Active after no repair
258892023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
258902023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Set check for repair
258912023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:37034 task reports connection:true
258922023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 Active Active Active
258932023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Set check for repair
258942023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:53040 task reports connection:true
258952023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 Active Active Active
258962023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): Set check for repair
258972023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [0] received reconcile message
258982023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
258992023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
259002023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [1] received reconcile message
259012023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
259022023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
259032023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [2] received reconcile message
259042023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
259052023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
259062023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): volume 1eb9cf2b-fac5-48fd-86a9-e21629057f3e activated ok
259072023-09-22T23:21:40.629ZINFOcrucible-pantry (datafile): volume 1eb9cf2b-fac5-48fd-86a9-e21629057f3e constructed and inserted ok
259082023-09-22T23:21:40.630ZINFOcrucible-pantry (dropshot): request completed latency_us = 40515 local_addr = 127.0.0.1:47396 method = POST remote_addr = 127.0.0.1:39199 req_id = 19e8e4b7-584d-4860-9bb9-d77596a10c8e response_code = 200 uri = /crucible/pantry/0/volume/1eb9cf2b-fac5-48fd-86a9-e21629057f3e
259092023-09-22T23:21:40.630ZINFOcrucible-pantry (dropshot): request completed latency_us = 226 local_addr = 127.0.0.1:47396 method = POST remote_addr = 127.0.0.1:39199 req_id = 40b20567-b3b8-4142-9c18-058bb40bfd73 response_code = 200 uri = /crucible/pantry/0/volume/1eb9cf2b-fac5-48fd-86a9-e21629057f3e/import_from_url
25910 Sep 22 23:21:40.643 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25911 Sep 22 23:21:40.645 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25912 Sep 22 23:21:40.648 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25913 Sep 22 23:21:40.678 INFO Downstairs has completed Negotiation, task: proc
25914 Sep 22 23:21:40.680 INFO Downstairs has completed Negotiation, task: proc
25915 Sep 22 23:21:40.681 INFO Downstairs has completed Negotiation, task: proc
259162023-09-22T23:21:40.682ZINFOcrucible-pantry (datafile): [0] cf566b9e-aff0-4665-9031-98e179d159a4 (e7612fa3-27f9-4d09-a4f3-b6224148140c) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
259172023-09-22T23:21:40.682ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
259182023-09-22T23:21:40.682ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
259192023-09-22T23:21:40.682ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
259202023-09-22T23:21:40.682ZINFOcrucible-pantry (datafile): [1] cf566b9e-aff0-4665-9031-98e179d159a4 (e7612fa3-27f9-4d09-a4f3-b6224148140c) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
259212023-09-22T23:21:40.682ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
259222023-09-22T23:21:40.682ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
259232023-09-22T23:21:40.682ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
259242023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [2] cf566b9e-aff0-4665-9031-98e179d159a4 (e7612fa3-27f9-4d09-a4f3-b6224148140c) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
259252023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
259262023-09-22T23:21:40.683ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
259272023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
259282023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:40057 task reports connection:true
259292023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 WaitQuorum WaitQuorum WaitQuorum
259302023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
259312023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
259322023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
259332023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
259342023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
259352023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
259362023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25937 The guest has finished waiting for activation
259382023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
259392023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
259402023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): Max found gen is 1
259412023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
259422023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): Next flush: 1
259432023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): All extents match
259442023-09-22T23:21:40.683ZINFOcrucible-pantry (datafile): No downstairs repair required
259452023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): No initial repair work was required
259462023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
259472023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 is now active with session: e7612fa3-27f9-4d09-a4f3-b6224148140c
259482023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 Set Active after no repair
259492023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
259502023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): Set check for repair
259512023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:65451 task reports connection:true
259522023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 Active Active Active
259532023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): Set check for repair
259542023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60454 task reports connection:true
259552023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 Active Active Active
259562023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): Set check for repair
259572023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [0] received reconcile message
259582023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
259592023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
259602023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [1] received reconcile message
259612023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
259622023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
259632023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [2] received reconcile message
259642023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
259652023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
259662023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): volume 3251378f-ea13-40c9-9d37-7f38d07fbf35 activated ok
259672023-09-22T23:21:40.684ZINFOcrucible-pantry (datafile): volume 3251378f-ea13-40c9-9d37-7f38d07fbf35 constructed and inserted ok
259682023-09-22T23:21:40.684ZINFOcrucible-pantry (dropshot): request completed latency_us = 64198 local_addr = 127.0.0.1:52393 method = POST remote_addr = 127.0.0.1:57584 req_id = cc4a97f6-c0bc-4193-a279-a2f075f7f767 response_code = 200 uri = /crucible/pantry/0/volume/3251378f-ea13-40c9-9d37-7f38d07fbf35
259692023-09-22T23:21:40.685ZINFOcrucible-pantry (dropshot): request completed latency_us = 394 local_addr = 127.0.0.1:52393 method = POST remote_addr = 127.0.0.1:57584 req_id = 21ae3786-e768-471e-904a-7755a2d1296d response_code = 200 uri = /crucible/pantry/0/volume/3251378f-ea13-40c9-9d37-7f38d07fbf35/import_from_url
259702023-09-22T23:21:40.700ZINFOcrucible-pantry (dropshot): request completed latency_us = 228 local_addr = 127.0.0.1:47396 method = GET remote_addr = 127.0.0.1:39199 req_id = b91e04c4-0783-4764-86b6-ba3828e8be17 response_code = 200 uri = /crucible/pantry/0/job/e14704dd-f376-471e-a250-3ebae85bb495/is_finished
259712023-09-22T23:21:40.762ZINFOcrucible-pantry (dropshot): request completed latency_us = 234 local_addr = 127.0.0.1:52393 method = GET remote_addr = 127.0.0.1:57584 req_id = 1d1fe7f5-7d19-4477-a4bd-246857fb6569 response_code = 200 uri = /crucible/pantry/0/job/7165c87e-0953-466c-822f-e26bef4d9d7d/is_finished
259722023-09-22T23:21:40.807ZINFOcrucible-pantry (dropshot): request completed latency_us = 223917 local_addr = 127.0.0.1:47828 method = POST remote_addr = 127.0.0.1:35008 req_id = d2b8adcd-68bf-4c7d-a905-f102a2a96cac response_code = 204 uri = /crucible/pantry/0/volume/07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec/bulk_write
259732023-09-22T23:21:40.909ZINFOcrucible-pantry (dropshot): request completed latency_us = 278122 local_addr = 127.0.0.1:46176 method = POST remote_addr = 127.0.0.1:35475 req_id = 1f6a8a20-1e59-4998-ad8a-00c90bab92aa response_code = 204 uri = /crucible/pantry/0/volume/ed764ae9-e3ce-441f-8278-67469b5636e4/bulk_write
259742023-09-22T23:21:40.913ZINFOcrucible-pantry (datafile): detach removing entry for volume ed764ae9-e3ce-441f-8278-67469b5636e4
259752023-09-22T23:21:40.913ZINFOcrucible-pantry (datafile): detaching volume ed764ae9-e3ce-441f-8278-67469b5636e4
25976 Sep 22 23:21:40.916 DEBG Write :1000 deps:[] res:true
25977 Sep 22 23:21:40.920 DEBG Read :1001 deps:[JobId(1000)] res:true
25978 Sep 22 23:21:40.945 DEBG Write :1000 deps:[] res:true
25979 Sep 22 23:21:40.949 DEBG Read :1001 deps:[JobId(1000)] res:true
25980 Sep 22 23:21:40.976 DEBG Write :1000 deps:[] res:true
25981 Sep 22 23:21:40.980 DEBG Read :1001 deps:[JobId(1000)] res:true
25982 Sep 22 23:21:41.005 DEBG Write :1000 deps:[] res:true
25983 Sep 22 23:21:41.021 DEBG Write :1000 deps:[] res:true
25984 Sep 22 23:21:41.037 DEBG Write :1000 deps:[] res:true
25985 Sep 22 23:21:41.042 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
25986 Sep 22 23:21:41.042 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
25987 Sep 22 23:21:41.042 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
259882023-09-22T23:21:41.043ZINFOcrucible-pantry (datafile): Request to deactivate this guest
259892023-09-22T23:21:41.043ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b set deactivating.
259902023-09-22T23:21:41.044ZINFOcrucible-pantry (dropshot): request completed latency_us = 130716 local_addr = 127.0.0.1:46176 method = DELETE remote_addr = 127.0.0.1:35475 req_id = 2de4f42e-7986-4816-831b-385fccf593e4 response_code = 204 uri = /crucible/pantry/0/volume/ed764ae9-e3ce-441f-8278-67469b5636e4
25991 Sep 22 23:21:41.044 INFO Upstairs starts
25992 Sep 22 23:21:41.044 INFO Crucible Version: BuildInfo {
25993 version: "0.0.1",
25994 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
25995 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
25996 git_branch: "main",
25997 rustc_semver: "1.70.0",
25998 rustc_channel: "stable",
25999 rustc_host_triple: "x86_64-unknown-illumos",
26000 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
26001 cargo_triple: "x86_64-unknown-illumos",
26002 debug: true,
26003 opt_level: 0,
26004 }
26005 Sep 22 23:21:41.044 INFO Upstairs <-> Downstairs Message Version: 4
26006 Sep 22 23:21:41.044 INFO Crucible stats registered with UUID: 585127d2-6fce-4638-823a-9e437ec79e5b
26007 Sep 22 23:21:41.044 INFO Crucible 585127d2-6fce-4638-823a-9e437ec79e5b has session id: 1ae42173-6784-460f-b521-f55c07c51ee3
26008 Sep 22 23:21:41.044 INFO [0] connecting to 127.0.0.1:37108, looper: 0
26009 Sep 22 23:21:41.044 INFO [1] connecting to 127.0.0.1:35718, looper: 1
26010 Sep 22 23:21:41.045 INFO [2] connecting to 127.0.0.1:60546, looper: 2
26011 Sep 22 23:21:41.045 INFO up_listen starts, task: up_listen
26012 Sep 22 23:21:41.045 INFO Wait for all three downstairs to come online
26013 Sep 22 23:21:41.045 INFO Flush timeout: 0.5
26014 Sep 22 23:21:41.045 INFO accepted connection from 127.0.0.1:58406, task: main
26015 Sep 22 23:21:41.045 INFO accepted connection from 127.0.0.1:59580, task: main
26016 Sep 22 23:21:41.045 INFO [0] 585127d2-6fce-4638-823a-9e437ec79e5b looper connected, looper: 0
26017 Sep 22 23:21:41.045 INFO [0] Proc runs for 127.0.0.1:37108 in state New
26018 Sep 22 23:21:41.045 INFO [1] 585127d2-6fce-4638-823a-9e437ec79e5b looper connected, looper: 1
26019 Sep 22 23:21:41.045 INFO [1] Proc runs for 127.0.0.1:35718 in state New
26020 Sep 22 23:21:41.045 INFO [2] 585127d2-6fce-4638-823a-9e437ec79e5b looper connected, looper: 2
26021 Sep 22 23:21:41.045 INFO [2] Proc runs for 127.0.0.1:60546 in state New
26022 Sep 22 23:21:41.045 INFO accepted connection from 127.0.0.1:43230, task: main
26023 Sep 22 23:21:41.045 INFO Connection request from 585127d2-6fce-4638-823a-9e437ec79e5b with version 4, task: proc
26024 Sep 22 23:21:41.045 INFO upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } connected, version 4, task: proc
26025 Sep 22 23:21:41.046 INFO Connection request from 585127d2-6fce-4638-823a-9e437ec79e5b with version 4, task: proc
26026 Sep 22 23:21:41.046 INFO upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } connected, version 4, task: proc
26027 Sep 22 23:21:41.046 INFO Connection request from 585127d2-6fce-4638-823a-9e437ec79e5b with version 4, task: proc
26028 Sep 22 23:21:41.046 INFO upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } connected, version 4, task: proc
26029 Sep 22 23:21:41.046 INFO [0] 585127d2-6fce-4638-823a-9e437ec79e5b (32007434-41a4-434b-a63f-6381f23a145d) New New New ds_transition to WaitActive
26030 Sep 22 23:21:41.046 INFO [0] Transition from New to WaitActive
26031 Sep 22 23:21:41.046 INFO [1] 585127d2-6fce-4638-823a-9e437ec79e5b (32007434-41a4-434b-a63f-6381f23a145d) WaitActive New New ds_transition to WaitActive
26032 Sep 22 23:21:41.046 INFO [1] Transition from New to WaitActive
26033 Sep 22 23:21:41.046 INFO [2] 585127d2-6fce-4638-823a-9e437ec79e5b (32007434-41a4-434b-a63f-6381f23a145d) WaitActive WaitActive New ds_transition to WaitActive
26034 Sep 22 23:21:41.046 INFO [2] Transition from New to WaitActive
26035 The guest has requested activation
26036 Sep 22 23:21:41.046 INFO 585127d2-6fce-4638-823a-9e437ec79e5b active request set
26037 Sep 22 23:21:41.046 INFO [0] received activate with gen 2
26038 Sep 22 23:21:41.046 INFO [0] client got ds_active_rx, promote! session 32007434-41a4-434b-a63f-6381f23a145d
26039 Sep 22 23:21:41.046 INFO [1] received activate with gen 2
26040 Sep 22 23:21:41.046 INFO [1] client got ds_active_rx, promote! session 32007434-41a4-434b-a63f-6381f23a145d
26041 Sep 22 23:21:41.046 INFO [2] received activate with gen 2
26042 Sep 22 23:21:41.046 INFO [2] client got ds_active_rx, promote! session 32007434-41a4-434b-a63f-6381f23a145d
26043 Sep 22 23:21:41.046 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } to UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 }
26044 Sep 22 23:21:41.046 WARN Signaling to UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } thread that UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } is being promoted (read-write)
26045 Sep 22 23:21:41.047 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } to UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 }
26046 Sep 22 23:21:41.047 WARN Signaling to UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } thread that UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } is being promoted (read-write)
26047 Sep 22 23:21:41.047 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } to UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 }
26048 Sep 22 23:21:41.047 WARN Signaling to UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 } thread that UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } is being promoted (read-write)
26049 Sep 22 23:21:41.047 WARN Another upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 }, task: main
26050 Sep 22 23:21:41.047 INFO UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } is now active (read-write)
26051 Sep 22 23:21:41.047 WARN Another upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 }, task: main
26052 Sep 22 23:21:41.047 INFO UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } is now active (read-write)
26053 Sep 22 23:21:41.047 WARN Another upstairs UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 3a227d09-1b16-48c0-8c37-0e4189fb3e80, gen: 1 }, task: main
26054 Sep 22 23:21:41.048 INFO UpstairsConnection { upstairs_id: 585127d2-6fce-4638-823a-9e437ec79e5b, session_id: 32007434-41a4-434b-a63f-6381f23a145d, gen: 2 } is now active (read-write)
26055 Sep 22 23:21:41.048 INFO connection (127.0.0.1:43703): all done
26056 Sep 22 23:21:41.048 INFO connection (127.0.0.1:60507): all done
26057 Sep 22 23:21:41.048 INFO connection (127.0.0.1:49103): all done
260582023-09-22T23:21:41.048ZERROcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) cmd_loop saw YouAreNoLongerActive 585127d2-6fce-4638-823a-9e437ec79e5b 32007434-41a4-434b-a63f-6381f23a145d 2
260592023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) Active Active Active ds_transition to Disabled
260602023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
260612023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b set inactive, session 3a227d09-1b16-48c0-8c37-0e4189fb3e80
260622023-09-22T23:21:41.048ZERROcrucible-pantry (datafile): 127.0.0.1:37108: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 0
260632023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b Gone missing, transition from Disabled to Disconnected
260642023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): [0] 585127d2-6fce-4638-823a-9e437ec79e5b connection to 127.0.0.1:37108 closed looper = 0
260652023-09-22T23:21:41.048ZERROcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) cmd_loop saw YouAreNoLongerActive 585127d2-6fce-4638-823a-9e437ec79e5b 32007434-41a4-434b-a63f-6381f23a145d 2
260662023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) Disconnected Active Active ds_transition to Disabled
260672023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
260682023-09-22T23:21:41.048ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b set inactive, session 3a227d09-1b16-48c0-8c37-0e4189fb3e80
260692023-09-22T23:21:41.048ZERROcrucible-pantry (datafile): 127.0.0.1:35718: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 1
260702023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b Gone missing, transition from Disabled to Disconnected
260712023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [1] 585127d2-6fce-4638-823a-9e437ec79e5b connection to 127.0.0.1:35718 closed looper = 1
260722023-09-22T23:21:41.049ZERROcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) cmd_loop saw YouAreNoLongerActive 585127d2-6fce-4638-823a-9e437ec79e5b 32007434-41a4-434b-a63f-6381f23a145d 2
260732023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b (3a227d09-1b16-48c0-8c37-0e4189fb3e80) Disconnected Disconnected Active ds_transition to Disabled
260742023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
260752023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b set inactive, session 3a227d09-1b16-48c0-8c37-0e4189fb3e80
260762023-09-22T23:21:41.049ZERROcrucible-pantry (datafile): 127.0.0.1:60546: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 2
260772023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b Gone missing, transition from Disabled to Disconnected
260782023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [2] 585127d2-6fce-4638-823a-9e437ec79e5b connection to 127.0.0.1:60546 closed looper = 2
260792023-09-22T23:21:41.049ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
260802023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:37108 task reports connection:false
260812023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b Disconnected Disconnected Disconnected
260822023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:37108 task reports offline
26083 Sep 22 23:21:41.049 INFO [0] downstairs client at 127.0.0.1:37108 has UUID aa05d4f9-364f-4612-a579-613b38785c79
26084 Sep 22 23:21:41.049 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: aa05d4f9-364f-4612-a579-613b38785c79, encrypted: true, database_read_version: 1, database_write_version: 1 }
260852023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:35718 task reports connection:false
26086 Sep 22 23:21:41.049 INFO 585127d2-6fce-4638-823a-9e437ec79e5b WaitActive WaitActive WaitActive
260872023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b Disconnected Disconnected Disconnected
260882023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:35718 task reports offline
26089 Sep 22 23:21:41.049 INFO [1] downstairs client at 127.0.0.1:35718 has UUID 9c7d4f0a-db95-4538-bb7e-7b1ed1936555
26090 Sep 22 23:21:41.049 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 9c7d4f0a-db95-4538-bb7e-7b1ed1936555, encrypted: true, database_read_version: 1, database_write_version: 1 }
260912023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60546 task reports connection:false
26092 Sep 22 23:21:41.049 INFO 585127d2-6fce-4638-823a-9e437ec79e5b WaitActive WaitActive WaitActive
260932023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): 585127d2-6fce-4638-823a-9e437ec79e5b Disconnected Disconnected Disconnected
260942023-09-22T23:21:41.049ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60546 task reports offline
26095 Sep 22 23:21:41.049 INFO [2] downstairs client at 127.0.0.1:60546 has UUID b63118d3-af4f-496f-b02c-9f9b92d23429
26096 Sep 22 23:21:41.049 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b63118d3-af4f-496f-b02c-9f9b92d23429, encrypted: true, database_read_version: 1, database_write_version: 1 }
260972023-09-22T23:21:41.049ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
26098 Sep 22 23:21:41.049 INFO 585127d2-6fce-4638-823a-9e437ec79e5b WaitActive WaitActive WaitActive
260992023-09-22T23:21:41.049ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
26100 Sep 22 23:21:41.057 INFO Current flush_numbers [0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26101 Sep 22 23:21:41.058 INFO Current flush_numbers [0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26102 Sep 22 23:21:41.059 INFO Current flush_numbers [0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26103 Sep 22 23:21:41.072 INFO Downstairs has completed Negotiation, task: proc
26104 Sep 22 23:21:41.073 INFO Downstairs has completed Negotiation, task: proc
26105 Sep 22 23:21:41.074 INFO Downstairs has completed Negotiation, task: proc
26106 Sep 22 23:21:41.074 INFO [0] 585127d2-6fce-4638-823a-9e437ec79e5b (32007434-41a4-434b-a63f-6381f23a145d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
26107 Sep 22 23:21:41.074 INFO [0] Transition from WaitActive to WaitQuorum
26108 Sep 22 23:21:41.074 WARN [0] new RM replaced this: None
26109 Sep 22 23:21:41.074 INFO [0] Starts reconcile loop
26110 Sep 22 23:21:41.074 INFO [1] 585127d2-6fce-4638-823a-9e437ec79e5b (32007434-41a4-434b-a63f-6381f23a145d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
26111 Sep 22 23:21:41.075 INFO [1] Transition from WaitActive to WaitQuorum
26112 Sep 22 23:21:41.075 WARN [1] new RM replaced this: None
26113 Sep 22 23:21:41.075 INFO [1] Starts reconcile loop
26114 Sep 22 23:21:41.075 INFO [2] 585127d2-6fce-4638-823a-9e437ec79e5b (32007434-41a4-434b-a63f-6381f23a145d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
26115 Sep 22 23:21:41.075 INFO [2] Transition from WaitActive to WaitQuorum
26116 Sep 22 23:21:41.075 WARN [2] new RM replaced this: None
26117 Sep 22 23:21:41.075 INFO [2] Starts reconcile loop
26118 Sep 22 23:21:41.075 INFO [0] 127.0.0.1:37108 task reports connection:true
26119 Sep 22 23:21:41.075 INFO 585127d2-6fce-4638-823a-9e437ec79e5b WaitQuorum WaitQuorum WaitQuorum
26120 Sep 22 23:21:41.075 INFO [0]R flush_numbers[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26121 Sep 22 23:21:41.075 INFO [0]R generation[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26122 Sep 22 23:21:41.075 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
26123 Sep 22 23:21:41.075 INFO [1]R flush_numbers[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26124 Sep 22 23:21:41.075 INFO [1]R generation[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26125 Sep 22 23:21:41.075 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
26126 Sep 22 23:21:41.075 INFO [2]R flush_numbers[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26127 Sep 22 23:21:41.075 INFO [2]R generation[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
26128 Sep 22 23:21:41.075 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
26129 Sep 22 23:21:41.075 INFO Max found gen is 2
26130 Sep 22 23:21:41.075 INFO Generation requested: 2 >= found:2
26131 Sep 22 23:21:41.075 INFO Next flush: 2
26132 Sep 22 23:21:41.075 INFO All extents match
26133 Sep 22 23:21:41.075 INFO No downstairs repair required
26134 Sep 22 23:21:41.075 INFO No initial repair work was required
26135 Sep 22 23:21:41.075 INFO Set Downstairs and Upstairs active
26136 Sep 22 23:21:41.075 INFO 585127d2-6fce-4638-823a-9e437ec79e5b is now active with session: 32007434-41a4-434b-a63f-6381f23a145d
26137 Sep 22 23:21:41.075 INFO 585127d2-6fce-4638-823a-9e437ec79e5b Set Active after no repair
26138 Sep 22 23:21:41.075 INFO Notify all downstairs, region set compare is done.
26139 Sep 22 23:21:41.075 INFO Set check for repair
26140 Sep 22 23:21:41.075 INFO [1] 127.0.0.1:35718 task reports connection:true
26141 Sep 22 23:21:41.075 INFO 585127d2-6fce-4638-823a-9e437ec79e5b Active Active Active
26142 Sep 22 23:21:41.075 INFO Set check for repair
26143 Sep 22 23:21:41.075 INFO [2] 127.0.0.1:60546 task reports connection:true
26144 Sep 22 23:21:41.075 INFO 585127d2-6fce-4638-823a-9e437ec79e5b Active Active Active
26145 Sep 22 23:21:41.075 INFO Set check for repair
26146 Sep 22 23:21:41.075 INFO [0] received reconcile message
26147 Sep 22 23:21:41.075 INFO [0] All repairs completed, exit
26148 Sep 22 23:21:41.075 INFO [0] Starts cmd_loop
26149 Sep 22 23:21:41.075 INFO [1] received reconcile message
26150 Sep 22 23:21:41.075 INFO [1] All repairs completed, exit
26151 Sep 22 23:21:41.075 INFO [1] Starts cmd_loop
26152 Sep 22 23:21:41.076 INFO [2] received reconcile message
26153 Sep 22 23:21:41.076 INFO [2] All repairs completed, exit
26154 Sep 22 23:21:41.076 INFO [2] Starts cmd_loop
26155 The guest has finished waiting for activation
26156 Sep 22 23:21:41.077 DEBG IO Read 1000 has deps []
26157 Sep 22 23:21:41.087 DEBG Read :1000 deps:[] res:true
26158 Sep 22 23:21:41.101 DEBG Read :1000 deps:[] res:true
26159 Sep 22 23:21:41.120 DEBG Read :1000 deps:[] res:true
26160 Sep 22 23:21:41.426 DEBG [0] Read AckReady 1000, : downstairs
26161 Sep 22 23:21:41.591 DEBG [1] Read already AckReady 1000, : downstairs
261622023-09-22T23:21:41.684ZINFOcrucible-pantry (datafile): Checking if live repair is needed
261632023-09-22T23:21:41.684ZINFOcrucible-pantry (datafile): No Live Repair required at this time
261642023-09-22T23:21:41.702ZINFOcrucible-pantry (datafile): Checking if live repair is needed
261652023-09-22T23:21:41.702ZINFOcrucible-pantry (datafile): No Live Repair required at this time
261662023-09-22T23:21:41.703ZINFOcrucible-pantry (dropshot): request completed latency_us = 385 local_addr = 127.0.0.1:47396 method = GET remote_addr = 127.0.0.1:39199 req_id = 748e1c09-92e3-4fba-b7d0-a9eaa2c05c5d response_code = 200 uri = /crucible/pantry/0/job/e14704dd-f376-471e-a250-3ebae85bb495/is_finished
26167 Sep 22 23:21:41.725 WARN f712941d-0577-43e4-8a2e-7814ec270c09 request to replace downstairs 127.0.0.1:46295 with 127.0.0.1:47135
26168 Sep 22 23:21:41.725 INFO f712941d-0577-43e4-8a2e-7814ec270c09 found new target: 127.0.0.1:47135 at 0
26169 Sep 22 23:21:41.725 INFO Downstairs replacement completed
26170 Sep 22 23:21:41.725 DEBG IO Read 1011 has deps []
26171 Sep 22 23:21:41.726 DEBG Read :1011 deps:[] res:true
26172 Sep 22 23:21:41.727 DEBG Read :1011 deps:[] res:true
26173 Sep 22 23:21:41.727 DEBG Read :1011 deps:[] res:true
26174 Sep 22 23:21:41.732 DEBG [1] Read AckReady 1011, : downstairs
26175 Sep 22 23:21:41.735 DEBG [2] Read already AckReady 1011, : downstairs
26176 Sep 22 23:21:41.738 DEBG [0] Read already AckReady 1011, : downstairs
26177 Sep 22 23:21:41.738 DEBG up_ds_listen was notified
26178 Sep 22 23:21:41.738 DEBG up_ds_listen process 1011
26179 Sep 22 23:21:41.738 DEBG [A] ack job 1011:12, : downstairs
26180 Sep 22 23:21:41.739 DEBG up_ds_listen checked 1 jobs, back to waiting
26181 test test::integration_test_guest_replace_downstairs ... ok
26182 Sep 22 23:21:41.756 DEBG [2] Read already AckReady 1000, : downstairs
26183 Sep 22 23:21:41.757 DEBG up_ds_listen was notified
26184 Sep 22 23:21:41.757 DEBG up_ds_listen process 1000
26185 Sep 22 23:21:41.757 DEBG [A] ack job 1000:1, : downstairs
261862023-09-22T23:21:41.764ZINFOcrucible-pantry (dropshot): request completed latency_us = 315 local_addr = 127.0.0.1:52393 method = GET remote_addr = 127.0.0.1:57584 req_id = 45f054bd-5ad6-433f-a9e6-0701595805cf response_code = 200 uri = /crucible/pantry/0/job/7165c87e-0953-466c-822f-e26bef4d9d7d/is_finished
261872023-09-22T23:21:41.767ZINFOcrucible-pantry (dropshot): request completed latency_us = 956875 local_addr = 127.0.0.1:47828 method = POST remote_addr = 127.0.0.1:35008 req_id = 5dc0da9e-7211-4ba5-afda-0f61e6fc3dad response_code = 200 uri = /crucible/pantry/0/volume/07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec/bulk_read
261882023-09-22T23:21:41.767ZINFOcrucible-pantry (datafile): Checking if live repair is needed
261892023-09-22T23:21:41.767ZINFOcrucible-pantry (datafile): No Live Repair required at this time
26190 Sep 22 23:21:41.776 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
26191 Sep 22 23:21:41.777 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
26192 Sep 22 23:21:41.777 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
26193 Sep 22 23:21:41.785 DEBG up_ds_listen checked 1 jobs, back to waiting
261942023-09-22T23:21:41.802ZINFOcrucible-pantry (datafile): detach removing entry for volume 07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec
261952023-09-22T23:21:41.802ZINFOcrucible-pantry (datafile): detaching volume 07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec
26196 Sep 22 23:21:41.803 DEBG Flush :1003 extent_limit None deps:[] res:true f:2 g:1
26197 Sep 22 23:21:41.803 DEBG Flush :1003 extent_limit None deps:[] res:true f:2 g:1
26198 Sep 22 23:21:41.803 DEBG Flush :1003 extent_limit None deps:[] res:true f:2 g:1
261992023-09-22T23:21:41.804ZINFOcrucible-pantry (datafile): Request to deactivate this guest
262002023-09-22T23:21:41.804ZINFOcrucible-pantry (datafile): 6d97a177-f79d-4abd-9fa5-a1779824805b set deactivating.
262012023-09-22T23:21:41.804ZINFOcrucible-pantry (dropshot): request completed latency_us = 2098 local_addr = 127.0.0.1:47828 method = DELETE remote_addr = 127.0.0.1:35008 req_id = 580bc9aa-16aa-4105-ac00-971b48886428 response_code = 204 uri = /crucible/pantry/0/volume/07fcaddf-9197-4aa4-a6a7-ae8fc7c212ec
26202 Sep 22 23:21:41.811 INFO Checking if live repair is needed
26203 Sep 22 23:21:41.811 INFO No Live Repair required at this time
26204 Sep 22 23:21:41.844 DEBG Write :1000 deps:[] res:true
26205 Sep 22 23:21:41.869 DEBG Write :1000 deps:[] res:true
26206 Sep 22 23:21:41.894 DEBG Write :1000 deps:[] res:true
26207 Sep 22 23:21:41.907 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
26208 Sep 22 23:21:41.907 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
26209 Sep 22 23:21:41.907 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
26210 test test::test_pantry_bulk_write_max_chunk_size ... ok
26211 Sep 22 23:21:41.944 INFO current number of open files limit 65536 is already the maximum
26212 Sep 22 23:21:41.944 INFO Created new region file "/tmp/downstairs-we6eLCZU/region.json"
26213 Sep 22 23:21:41.948 INFO current number of open files limit 65536 is already the maximum
26214 Sep 22 23:21:41.948 INFO Opened existing region file "/tmp/downstairs-we6eLCZU/region.json"
26215 Sep 22 23:21:41.948 INFO Database read version 1
26216 Sep 22 23:21:41.948 INFO Database write version 1
26217 Sep 22 23:21:41.949 INFO UUID: 5bd06377-7334-447c-a337-f5f05f8d6c8d
26218 Sep 22 23:21:41.949 INFO Blocks per extent:5 Total Extents: 2
26219 Sep 22 23:21:41.949 INFO Crucible Version: Crucible Version: 0.0.1
26220 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26221 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26222 rustc: 1.70.0 stable x86_64-unknown-illumos
26223 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26224 Sep 22 23:21:41.949 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26225 Sep 22 23:21:41.949 INFO Using address: 127.0.0.1:56105, task: main
26226 Sep 22 23:21:41.950 INFO Repair listens on 127.0.0.1:0, task: repair
26227 Sep 22 23:21:41.950 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53964, task: repair
26228 Sep 22 23:21:41.950 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53964, task: repair
26229 Sep 22 23:21:41.950 INFO listening, local_addr: 127.0.0.1:53964, task: repair
26230 Sep 22 23:21:41.950 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53964, task: repair
26231 Sep 22 23:21:41.950 INFO Using repair address: 127.0.0.1:53964, task: main
26232 Sep 22 23:21:41.950 INFO No SSL acceptor configured, task: main
26233 Sep 22 23:21:41.950 INFO current number of open files limit 65536 is already the maximum
26234 Sep 22 23:21:41.950 INFO Created new region file "/tmp/downstairs-2o9xo501/region.json"
26235 Sep 22 23:21:41.954 INFO current number of open files limit 65536 is already the maximum
26236 Sep 22 23:21:41.954 INFO Opened existing region file "/tmp/downstairs-2o9xo501/region.json"
26237 Sep 22 23:21:41.954 INFO Database read version 1
26238 Sep 22 23:21:41.954 INFO Database write version 1
26239 Sep 22 23:21:41.955 INFO UUID: 9bb0c711-bf0b-4c97-9224-93a1bd568273
26240 Sep 22 23:21:41.955 INFO Blocks per extent:5 Total Extents: 2
26241 Sep 22 23:21:41.956 INFO Crucible Version: Crucible Version: 0.0.1
26242 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26243 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26244 rustc: 1.70.0 stable x86_64-unknown-illumos
26245 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26246 Sep 22 23:21:41.956 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26247 Sep 22 23:21:41.956 INFO Using address: 127.0.0.1:34504, task: main
26248 Sep 22 23:21:41.956 INFO Repair listens on 127.0.0.1:0, task: repair
26249 Sep 22 23:21:41.956 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51030, task: repair
26250 Sep 22 23:21:41.956 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51030, task: repair
26251 Sep 22 23:21:41.956 INFO listening, local_addr: 127.0.0.1:51030, task: repair
26252 Sep 22 23:21:41.956 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51030, task: repair
26253 Sep 22 23:21:41.956 INFO Using repair address: 127.0.0.1:51030, task: main
26254 Sep 22 23:21:41.956 INFO No SSL acceptor configured, task: main
26255 Sep 22 23:21:41.956 INFO current number of open files limit 65536 is already the maximum
26256 Sep 22 23:21:41.957 INFO Created new region file "/tmp/downstairs-QafMFymO/region.json"
26257 Sep 22 23:21:41.960 INFO current number of open files limit 65536 is already the maximum
26258 Sep 22 23:21:41.960 INFO Opened existing region file "/tmp/downstairs-QafMFymO/region.json"
26259 Sep 22 23:21:41.960 INFO Database read version 1
26260 Sep 22 23:21:41.960 INFO Database write version 1
26261 Sep 22 23:21:41.961 INFO UUID: 0a385deb-9c37-4b6c-9838-b74475ae3131
26262 Sep 22 23:21:41.961 INFO Blocks per extent:5 Total Extents: 2
26263 Sep 22 23:21:41.961 INFO Crucible Version: Crucible Version: 0.0.1
26264 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26265 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26266 rustc: 1.70.0 stable x86_64-unknown-illumos
26267 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26268 Sep 22 23:21:41.961 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26269 Sep 22 23:21:41.961 INFO Using address: 127.0.0.1:33240, task: main
26270 Sep 22 23:21:41.962 INFO Repair listens on 127.0.0.1:0, task: repair
26271 Sep 22 23:21:41.962 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62996, task: repair
26272 Sep 22 23:21:41.962 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62996, task: repair
26273 Sep 22 23:21:41.962 INFO listening, local_addr: 127.0.0.1:62996, task: repair
26274 Sep 22 23:21:41.962 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62996, task: repair
26275 Sep 22 23:21:41.962 INFO Using repair address: 127.0.0.1:62996, task: main
26276 Sep 22 23:21:41.962 INFO No SSL acceptor configured, task: main
26277 note: configured to log to "/dev/stdout"
262782023-09-22T23:21:41.964ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:58561
262792023-09-22T23:21:41.964ZINFOcrucible-pantry: listen IP: 127.0.0.1:58561
26280 test test::test_pantry_bulk_read_max_chunk_size ... ok
26281 Sep 22 23:21:41.978 INFO current number of open files limit 65536 is already the maximum
26282 Sep 22 23:21:41.979 INFO Created new region file "/tmp/downstairs-m4GfdQ6u/region.json"
26283 Sep 22 23:21:41.981 INFO current number of open files limit 65536 is already the maximum
26284 Sep 22 23:21:41.981 INFO Opened existing region file "/tmp/downstairs-m4GfdQ6u/region.json"
26285 Sep 22 23:21:41.981 INFO Database read version 1
26286 Sep 22 23:21:41.981 INFO Database write version 1
26287 Sep 22 23:21:41.981 INFO UUID: 1dc6ff1a-e3e5-49bb-96af-2f8e2384da86
26288 Sep 22 23:21:41.981 INFO Blocks per extent:5 Total Extents: 2
26289 Sep 22 23:21:41.981 INFO Crucible Version: Crucible Version: 0.0.1
26290 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26291 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26292 rustc: 1.70.0 stable x86_64-unknown-illumos
26293 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26294 Sep 22 23:21:41.981 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26295 Sep 22 23:21:41.981 INFO Using address: 127.0.0.1:40394, task: main
26296 Sep 22 23:21:41.982 INFO Repair listens on 127.0.0.1:0, task: repair
26297 Sep 22 23:21:41.982 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42941, task: repair
26298 Sep 22 23:21:41.982 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42941, task: repair
26299 Sep 22 23:21:41.982 INFO listening, local_addr: 127.0.0.1:42941, task: repair
26300 Sep 22 23:21:41.982 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42941, task: repair
26301 Sep 22 23:21:41.982 INFO Using repair address: 127.0.0.1:42941, task: main
26302 Sep 22 23:21:41.982 INFO No SSL acceptor configured, task: main
26303 Sep 22 23:21:41.982 INFO current number of open files limit 65536 is already the maximum
26304 Sep 22 23:21:41.982 INFO Created new region file "/tmp/downstairs-snkVG7mj/region.json"
26305 Sep 22 23:21:41.984 INFO current number of open files limit 65536 is already the maximum
26306 Sep 22 23:21:41.984 INFO Opened existing region file "/tmp/downstairs-snkVG7mj/region.json"
26307 Sep 22 23:21:41.984 INFO Database read version 1
26308 Sep 22 23:21:41.984 INFO Database write version 1
26309 Sep 22 23:21:41.985 INFO UUID: 6b825ea1-626e-494d-9a24-d66b39d14dbc
26310 Sep 22 23:21:41.985 INFO Blocks per extent:5 Total Extents: 2
26311 Sep 22 23:21:41.985 INFO Crucible Version: Crucible Version: 0.0.1
26312 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26313 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26314 rustc: 1.70.0 stable x86_64-unknown-illumos
26315 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26316 Sep 22 23:21:41.985 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26317 Sep 22 23:21:41.985 INFO Using address: 127.0.0.1:44812, task: main
26318 Sep 22 23:21:41.985 INFO Repair listens on 127.0.0.1:0, task: repair
26319 Sep 22 23:21:41.985 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47303, task: repair
26320 Sep 22 23:21:41.985 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47303, task: repair
26321 Sep 22 23:21:41.985 INFO listening, local_addr: 127.0.0.1:47303, task: repair
26322 Sep 22 23:21:41.985 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47303, task: repair
26323 Sep 22 23:21:41.985 INFO Using repair address: 127.0.0.1:47303, task: main
26324 Sep 22 23:21:41.985 INFO No SSL acceptor configured, task: main
26325 Sep 22 23:21:41.985 INFO current number of open files limit 65536 is already the maximum
26326 Sep 22 23:21:41.986 INFO Created new region file "/tmp/downstairs-WgIExdEe/region.json"
26327 Sep 22 23:21:41.988 INFO current number of open files limit 65536 is already the maximum
26328 Sep 22 23:21:41.988 INFO Opened existing region file "/tmp/downstairs-WgIExdEe/region.json"
26329 Sep 22 23:21:41.988 INFO Database read version 1
26330 Sep 22 23:21:41.988 INFO Database write version 1
26331 Sep 22 23:21:41.988 INFO UUID: 19cc35ab-e1e8-4b0e-9882-b11c9bb545fa
26332 Sep 22 23:21:41.988 INFO Blocks per extent:5 Total Extents: 2
26333 Sep 22 23:21:41.988 INFO Crucible Version: Crucible Version: 0.0.1
26334 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26335 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26336 rustc: 1.70.0 stable x86_64-unknown-illumos
26337 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26338 Sep 22 23:21:41.988 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26339 Sep 22 23:21:41.988 INFO Using address: 127.0.0.1:50704, task: main
26340 Sep 22 23:21:41.989 INFO Repair listens on 127.0.0.1:0, task: repair
26341 Sep 22 23:21:41.989 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62111, task: repair
26342 Sep 22 23:21:41.989 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62111, task: repair
26343 Sep 22 23:21:41.989 INFO listening, local_addr: 127.0.0.1:62111, task: repair
26344 Sep 22 23:21:41.989 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62111, task: repair
26345 Sep 22 23:21:41.989 INFO Using repair address: 127.0.0.1:62111, task: main
26346 Sep 22 23:21:41.989 INFO No SSL acceptor configured, task: main
26347 note: configured to log to "/dev/stdout"
263482023-09-22T23:21:41.990ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:63226
263492023-09-22T23:21:41.990ZINFOcrucible-pantry: listen IP: 127.0.0.1:63226
26350 Sep 22 23:21:41.991 WARN 52fe8d40-333d-4b64-8663-2adf0476947f request to replace downstairs 127.0.0.1:38958 with 127.0.0.1:62901
26351 Sep 22 23:21:41.991 INFO 52fe8d40-333d-4b64-8663-2adf0476947f found new target: 127.0.0.1:62901 at 0
26352 Downstairs replacement completed
26353 Sep 22 23:21:41.991 DEBG IO Read 1011 has deps []
26354 Sep 22 23:21:41.992 DEBG Read :1011 deps:[] res:true
26355 Sep 22 23:21:41.993 DEBG Read :1011 deps:[] res:true
26356 Sep 22 23:21:41.993 DEBG Read :1011 deps:[] res:true
26357 Sep 22 23:21:41.998 DEBG [1] Read AckReady 1011, : downstairs
26358 Sep 22 23:21:42.001 DEBG [2] Read already AckReady 1011, : downstairs
26359 Sep 22 23:21:42.004 DEBG [0] Read already AckReady 1011, : downstairs
26360 Sep 22 23:21:42.004 DEBG up_ds_listen was notified
26361 Sep 22 23:21:42.004 DEBG up_ds_listen process 1011
26362 Sep 22 23:21:42.004 DEBG [A] ack job 1011:12, : downstairs
26363 Sep 22 23:21:42.004 DEBG up_ds_listen checked 1 jobs, back to waiting
26364 test test::integration_test_volume_replace_downstairs ... ok
26365 Sep 22 23:21:42.011 INFO current number of open files limit 65536 is already the maximum
26366 Sep 22 23:21:42.011 INFO Created new region file "/tmp/downstairs-9LeQywgd/region.json"
26367 Sep 22 23:21:42.014 INFO current number of open files limit 65536 is already the maximum
26368 Sep 22 23:21:42.014 INFO Opened existing region file "/tmp/downstairs-9LeQywgd/region.json"
26369 Sep 22 23:21:42.014 INFO Database read version 1
26370 Sep 22 23:21:42.014 INFO Database write version 1
26371 Sep 22 23:21:42.015 INFO UUID: 981e485a-3d34-445d-9adb-f9e13ff6f329
26372 Sep 22 23:21:42.015 INFO Blocks per extent:5 Total Extents: 2
26373 Sep 22 23:21:42.015 INFO Crucible Version: Crucible Version: 0.0.1
26374 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26375 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26376 rustc: 1.70.0 stable x86_64-unknown-illumos
26377 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26378 Sep 22 23:21:42.015 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26379 Sep 22 23:21:42.015 INFO Using address: 127.0.0.1:42682, task: main
26380 Sep 22 23:21:42.016 INFO Repair listens on 127.0.0.1:0, task: repair
26381 Sep 22 23:21:42.016 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60320, task: repair
26382 Sep 22 23:21:42.016 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60320, task: repair
26383 Sep 22 23:21:42.016 INFO listening, local_addr: 127.0.0.1:60320, task: repair
26384 Sep 22 23:21:42.016 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60320, task: repair
26385 Sep 22 23:21:42.016 INFO Using repair address: 127.0.0.1:60320, task: main
26386 Sep 22 23:21:42.016 INFO No SSL acceptor configured, task: main
26387 Sep 22 23:21:42.016 INFO current number of open files limit 65536 is already the maximum
26388 Sep 22 23:21:42.016 INFO Created new region file "/tmp/downstairs-KFBQqDry/region.json"
26389 Sep 22 23:21:42.019 INFO current number of open files limit 65536 is already the maximum
26390 Sep 22 23:21:42.019 INFO Opened existing region file "/tmp/downstairs-KFBQqDry/region.json"
26391 Sep 22 23:21:42.019 INFO Database read version 1
26392 Sep 22 23:21:42.019 INFO Database write version 1
26393 Sep 22 23:21:42.020 INFO UUID: c77d45f9-2dd1-4698-958d-b4ab9ce4ffee
26394 Sep 22 23:21:42.020 INFO Blocks per extent:5 Total Extents: 2
26395 Sep 22 23:21:42.020 INFO Crucible Version: Crucible Version: 0.0.1
26396 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26397 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26398 rustc: 1.70.0 stable x86_64-unknown-illumos
26399 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26400 Sep 22 23:21:42.020 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26401 Sep 22 23:21:42.020 INFO Using address: 127.0.0.1:61724, task: main
26402 Sep 22 23:21:42.020 INFO Repair listens on 127.0.0.1:0, task: repair
26403 Sep 22 23:21:42.020 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54833, task: repair
26404 Sep 22 23:21:42.021 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54833, task: repair
26405 Sep 22 23:21:42.021 INFO listening, local_addr: 127.0.0.1:54833, task: repair
26406 Sep 22 23:21:42.021 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54833, task: repair
26407 Sep 22 23:21:42.021 INFO Using repair address: 127.0.0.1:54833, task: main
26408 Sep 22 23:21:42.021 INFO No SSL acceptor configured, task: main
26409 Sep 22 23:21:42.021 INFO current number of open files limit 65536 is already the maximum
26410 Sep 22 23:21:42.021 INFO Created new region file "/tmp/downstairs-4kPeDRfm/region.json"
26411 Sep 22 23:21:42.024 INFO current number of open files limit 65536 is already the maximum
26412 Sep 22 23:21:42.024 INFO Opened existing region file "/tmp/downstairs-4kPeDRfm/region.json"
26413 Sep 22 23:21:42.024 INFO Database read version 1
26414 Sep 22 23:21:42.024 INFO Database write version 1
26415 Sep 22 23:21:42.025 INFO UUID: 300935ac-634a-4338-b182-cfe66d8d8948
26416 Sep 22 23:21:42.025 INFO Blocks per extent:5 Total Extents: 2
26417 Sep 22 23:21:42.025 INFO Crucible Version: Crucible Version: 0.0.1
26418 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26419 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26420 rustc: 1.70.0 stable x86_64-unknown-illumos
26421 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26422 Sep 22 23:21:42.025 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26423 Sep 22 23:21:42.025 INFO Using address: 127.0.0.1:47055, task: main
26424 Sep 22 23:21:42.025 INFO Repair listens on 127.0.0.1:0, task: repair
26425 Sep 22 23:21:42.025 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62348, task: repair
26426 Sep 22 23:21:42.025 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62348, task: repair
26427 Sep 22 23:21:42.025 INFO listening, local_addr: 127.0.0.1:62348, task: repair
26428 Sep 22 23:21:42.025 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62348, task: repair
26429 Sep 22 23:21:42.026 INFO Using repair address: 127.0.0.1:62348, task: main
26430 Sep 22 23:21:42.026 INFO No SSL acceptor configured, task: main
26431 note: configured to log to "/dev/stdout"
264322023-09-22T23:21:42.027ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:33810
264332023-09-22T23:21:42.028ZINFOcrucible-pantry: listen IP: 127.0.0.1:33810
26434 Sep 22 23:21:42.124 INFO listening on 127.0.0.1:0, task: main
26435 Sep 22 23:21:42.124 INFO listening on 127.0.0.1:0, task: main
26436 Sep 22 23:21:42.125 INFO listening on 127.0.0.1:0, task: main
264372023-09-22T23:21:42.125ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:58561 remote_addr = 127.0.0.1:41754
264382023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): no entry exists for volume 6d13f2e2-dfe6-43d7-aeab-f413816a137e, constructing...
264392023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): Upstairs starts
264402023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
264412023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
264422023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0
264432023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): Crucible 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 has session id: cf6d7f60-3bbd-4eef-a7da-e75edf81adf5
264442023-09-22T23:21:42.126ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:56105 looper = 0
264452023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:34504 looper = 1
264462023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:33240 looper = 2
264472023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
264482023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
264492023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
264502023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): volume 6d13f2e2-dfe6-43d7-aeab-f413816a137e constructed ok
26451 The guest has requested activation
264522023-09-22T23:21:42.127ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 active request set
26453 Sep 22 23:21:42.127 INFO accepted connection from 127.0.0.1:39184, task: main
26454 Sep 22 23:21:42.127 INFO accepted connection from 127.0.0.1:46433, task: main
26455 Sep 22 23:21:42.128 INFO accepted connection from 127.0.0.1:62619, task: main
264562023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [0] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 looper connected looper = 0
264572023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:56105 in state New
264582023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [1] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 looper connected looper = 1
264592023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:34504 in state New
264602023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [2] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 looper connected looper = 2
264612023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:33240 in state New
26462 Sep 22 23:21:42.128 INFO Connection request from 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 with version 4, task: proc
26463 Sep 22 23:21:42.128 INFO upstairs UpstairsConnection { upstairs_id: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0, session_id: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1, gen: 1 } connected, version 4, task: proc
26464 Sep 22 23:21:42.128 INFO Connection request from 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 with version 4, task: proc
26465 Sep 22 23:21:42.128 INFO upstairs UpstairsConnection { upstairs_id: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0, session_id: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1, gen: 1 } connected, version 4, task: proc
26466 Sep 22 23:21:42.128 INFO Connection request from 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 with version 4, task: proc
26467 Sep 22 23:21:42.128 INFO upstairs UpstairsConnection { upstairs_id: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0, session_id: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1, gen: 1 } connected, version 4, task: proc
264682023-09-22T23:21:42.128ZINFOcrucible-pantry (datafile): [0] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 (2e08e8ff-b29e-4152-8a8e-7239daeea0f1) New New New ds_transition to WaitActive
264692023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
264702023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 2e08e8ff-b29e-4152-8a8e-7239daeea0f1
264712023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [1] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 (2e08e8ff-b29e-4152-8a8e-7239daeea0f1) WaitActive New New ds_transition to WaitActive
264722023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
264732023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 2e08e8ff-b29e-4152-8a8e-7239daeea0f1
264742023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [2] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 (2e08e8ff-b29e-4152-8a8e-7239daeea0f1) WaitActive WaitActive New ds_transition to WaitActive
264752023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
26476 Sep 22 23:21:42.129 INFO UpstairsConnection { upstairs_id: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0, session_id: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1, gen: 1 } is now active (read-write)
264772023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 2e08e8ff-b29e-4152-8a8e-7239daeea0f1
26478 Sep 22 23:21:42.129 INFO UpstairsConnection { upstairs_id: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0, session_id: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1, gen: 1 } is now active (read-write)
26479 Sep 22 23:21:42.129 INFO UpstairsConnection { upstairs_id: 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0, session_id: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1, gen: 1 } is now active (read-write)
264802023-09-22T23:21:42.129ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:56105 has UUID 5bd06377-7334-447c-a337-f5f05f8d6c8d
264812023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5bd06377-7334-447c-a337-f5f05f8d6c8d, encrypted: true, database_read_version: 1, database_write_version: 1 }
264822023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 WaitActive WaitActive WaitActive
264832023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:34504 has UUID 9bb0c711-bf0b-4c97-9224-93a1bd568273
264842023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9bb0c711-bf0b-4c97-9224-93a1bd568273, encrypted: true, database_read_version: 1, database_write_version: 1 }
264852023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 WaitActive WaitActive WaitActive
264862023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:33240 has UUID 0a385deb-9c37-4b6c-9838-b74475ae3131
264872023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 0a385deb-9c37-4b6c-9838-b74475ae3131, encrypted: true, database_read_version: 1, database_write_version: 1 }
264882023-09-22T23:21:42.130ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 WaitActive WaitActive WaitActive
26489 Sep 22 23:21:42.130 INFO Current flush_numbers [0..12]: [0, 0]
26490 Sep 22 23:21:42.130 INFO Downstairs has completed Negotiation, task: proc
26491 Sep 22 23:21:42.130 INFO Current flush_numbers [0..12]: [0, 0]
26492 Sep 22 23:21:42.131 INFO Downstairs has completed Negotiation, task: proc
26493 Sep 22 23:21:42.131 INFO Current flush_numbers [0..12]: [0, 0]
26494 Sep 22 23:21:42.131 INFO Downstairs has completed Negotiation, task: proc
264952023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [0] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 (2e08e8ff-b29e-4152-8a8e-7239daeea0f1) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
264962023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
264972023-09-22T23:21:42.131ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
264982023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
264992023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [1] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 (2e08e8ff-b29e-4152-8a8e-7239daeea0f1) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
265002023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
265012023-09-22T23:21:42.131ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
265022023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
265032023-09-22T23:21:42.131ZINFOcrucible-pantry (datafile): [2] 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 (2e08e8ff-b29e-4152-8a8e-7239daeea0f1) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
265042023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
26505 The guest has finished waiting for activation
265062023-09-22T23:21:42.132ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
265072023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
265082023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:56105 task reports connection:true
265092023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 WaitQuorum WaitQuorum WaitQuorum
265102023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
265112023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
265122023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
265132023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
265142023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
265152023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
265162023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
265172023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
265182023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
265192023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Max found gen is 1
265202023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
265212023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Next flush: 1
265222023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): All extents match
265232023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): No downstairs repair required
265242023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): No initial repair work was required
265252023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
265262023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 is now active with session: 2e08e8ff-b29e-4152-8a8e-7239daeea0f1
265272023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 Set Active after no repair
265282023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
265292023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Set check for repair
265302023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:34504 task reports connection:true
265312023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 Active Active Active
265322023-09-22T23:21:42.132ZINFOcrucible-pantry (datafile): Set check for repair
265332023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:33240 task reports connection:true
265342023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 Active Active Active
265352023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): Set check for repair
265362023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [0] received reconcile message
265372023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
265382023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
265392023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [1] received reconcile message
265402023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
265412023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
265422023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [2] received reconcile message
265432023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
265442023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
265452023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): volume 6d13f2e2-dfe6-43d7-aeab-f413816a137e activated ok
265462023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): volume 6d13f2e2-dfe6-43d7-aeab-f413816a137e constructed and inserted ok
265472023-09-22T23:21:42.133ZINFOcrucible-pantry (dropshot): request completed latency_us = 6580 local_addr = 127.0.0.1:58561 method = POST remote_addr = 127.0.0.1:41754 req_id = 9b3e1787-4434-4959-bf7a-a3ae6b2ae36f response_code = 200 uri = /crucible/pantry/0/volume/6d13f2e2-dfe6-43d7-aeab-f413816a137e
265482023-09-22T23:21:42.133ZINFOcrucible-pantry (datafile): flush with snap requested
26549 Sep 22 23:21:42.134 ERRO Snapshot request received on unsupported binary
26550 Sep 22 23:21:42.134 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
26551 Sep 22 23:21:42.134 ERRO Snapshot request received on unsupported binary
26552 Sep 22 23:21:42.134 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
26553 Sep 22 23:21:42.134 ERRO Snapshot request received on unsupported binary
26554 Sep 22 23:21:42.134 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
265552023-09-22T23:21:42.134ZINFOcrucible-pantry (dropshot): request completed latency_us = 1713 local_addr = 127.0.0.1:58561 method = POST remote_addr = 127.0.0.1:41754 req_id = 8f6d35ef-46dd-4149-9cfc-0e8bc774721b response_code = 204 uri = /crucible/pantry/0/volume/6d13f2e2-dfe6-43d7-aeab-f413816a137e/snapshot
265562023-09-22T23:21:42.135ZINFOcrucible-pantry (datafile): detach removing entry for volume 6d13f2e2-dfe6-43d7-aeab-f413816a137e
265572023-09-22T23:21:42.135ZINFOcrucible-pantry (datafile): detaching volume 6d13f2e2-dfe6-43d7-aeab-f413816a137e
26558 Sep 22 23:21:42.136 DEBG Flush :1001 extent_limit None deps:[] res:true f:2 g:1
26559 Sep 22 23:21:42.136 DEBG Flush :1001 extent_limit None deps:[] res:true f:2 g:1
26560 Sep 22 23:21:42.136 DEBG Flush :1001 extent_limit None deps:[] res:true f:2 g:1
265612023-09-22T23:21:42.136ZINFOcrucible-pantry (datafile): Request to deactivate this guest
265622023-09-22T23:21:42.137ZINFOcrucible-pantry (datafile): 2b87c9f1-3f50-4bc0-9661-27a6adb02fb0 set deactivating.
265632023-09-22T23:21:42.137ZINFOcrucible-pantry (dropshot): request completed latency_us = 1686 local_addr = 127.0.0.1:58561 method = DELETE remote_addr = 127.0.0.1:41754 req_id = 06fbc838-40cc-498e-996e-1858bd3f1bb1 response_code = 204 uri = /crucible/pantry/0/volume/6d13f2e2-dfe6-43d7-aeab-f413816a137e
26564 test test::test_pantry_snapshot ... ok
26565 Sep 22 23:21:42.144 INFO current number of open files limit 65536 is already the maximum
26566 Sep 22 23:21:42.144 INFO Created new region file "/tmp/downstairs-WW0WMgjz/region.json"
26567 Sep 22 23:21:42.147 INFO current number of open files limit 65536 is already the maximum
26568 Sep 22 23:21:42.147 INFO Opened existing region file "/tmp/downstairs-WW0WMgjz/region.json"
26569 Sep 22 23:21:42.147 INFO Database read version 1
26570 Sep 22 23:21:42.147 INFO Database write version 1
26571 Sep 22 23:21:42.148 INFO UUID: 101c5e08-1317-4b60-a031-62766e2cdb1f
26572 Sep 22 23:21:42.148 INFO Blocks per extent:5 Total Extents: 2
26573 Sep 22 23:21:42.148 INFO Crucible Version: Crucible Version: 0.0.1
26574 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26575 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26576 rustc: 1.70.0 stable x86_64-unknown-illumos
26577 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26578 Sep 22 23:21:42.148 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26579 Sep 22 23:21:42.148 INFO Using address: 127.0.0.1:49499, task: main
26580 Sep 22 23:21:42.149 INFO Repair listens on 127.0.0.1:0, task: repair
26581 Sep 22 23:21:42.149 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40788, task: repair
26582 Sep 22 23:21:42.149 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40788, task: repair
26583 Sep 22 23:21:42.149 INFO listening, local_addr: 127.0.0.1:40788, task: repair
26584 Sep 22 23:21:42.149 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40788, task: repair
26585 Sep 22 23:21:42.149 INFO Using repair address: 127.0.0.1:40788, task: main
26586 Sep 22 23:21:42.149 INFO No SSL acceptor configured, task: main
26587 Sep 22 23:21:42.149 INFO current number of open files limit 65536 is already the maximum
26588 Sep 22 23:21:42.149 INFO Created new region file "/tmp/downstairs-QUbX2kxB/region.json"
26589 Sep 22 23:21:42.152 INFO current number of open files limit 65536 is already the maximum
26590 Sep 22 23:21:42.152 INFO Opened existing region file "/tmp/downstairs-QUbX2kxB/region.json"
26591 Sep 22 23:21:42.152 INFO Database read version 1
26592 Sep 22 23:21:42.152 INFO Database write version 1
26593 Sep 22 23:21:42.153 INFO UUID: 44286653-4082-4ed7-a812-3555da359c78
26594 Sep 22 23:21:42.153 INFO Blocks per extent:5 Total Extents: 2
26595 Sep 22 23:21:42.153 INFO Crucible Version: Crucible Version: 0.0.1
26596 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26597 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26598 rustc: 1.70.0 stable x86_64-unknown-illumos
26599 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26600 Sep 22 23:21:42.153 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26601 Sep 22 23:21:42.153 INFO Using address: 127.0.0.1:56926, task: main
26602 Sep 22 23:21:42.153 INFO Repair listens on 127.0.0.1:0, task: repair
26603 Sep 22 23:21:42.153 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39647, task: repair
26604 Sep 22 23:21:42.154 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39647, task: repair
26605 Sep 22 23:21:42.154 INFO listening, local_addr: 127.0.0.1:39647, task: repair
26606 Sep 22 23:21:42.154 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39647, task: repair
26607 Sep 22 23:21:42.154 INFO Using repair address: 127.0.0.1:39647, task: main
26608 Sep 22 23:21:42.154 INFO No SSL acceptor configured, task: main
26609 Sep 22 23:21:42.154 INFO current number of open files limit 65536 is already the maximum
26610 Sep 22 23:21:42.154 INFO Created new region file "/tmp/downstairs-iu0Qdi78/region.json"
26611 Sep 22 23:21:42.157 INFO current number of open files limit 65536 is already the maximum
26612 Sep 22 23:21:42.157 INFO Opened existing region file "/tmp/downstairs-iu0Qdi78/region.json"
26613 Sep 22 23:21:42.157 INFO Database read version 1
26614 Sep 22 23:21:42.157 INFO Database write version 1
26615 Sep 22 23:21:42.158 INFO UUID: d4d95f2d-aa01-4319-8829-47ddcf12c9ca
26616 Sep 22 23:21:42.158 INFO Blocks per extent:5 Total Extents: 2
26617 Sep 22 23:21:42.158 INFO Crucible Version: Crucible Version: 0.0.1
26618 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26619 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26620 rustc: 1.70.0 stable x86_64-unknown-illumos
26621 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26622 Sep 22 23:21:42.158 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26623 Sep 22 23:21:42.158 INFO Using address: 127.0.0.1:46734, task: main
26624 Sep 22 23:21:42.158 INFO Repair listens on 127.0.0.1:0, task: repair
26625 Sep 22 23:21:42.158 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58207, task: repair
26626 Sep 22 23:21:42.158 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58207, task: repair
26627 Sep 22 23:21:42.158 INFO listening, local_addr: 127.0.0.1:58207, task: repair
26628 Sep 22 23:21:42.159 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58207, task: repair
26629 Sep 22 23:21:42.159 INFO Using repair address: 127.0.0.1:58207, task: main
26630 Sep 22 23:21:42.159 INFO No SSL acceptor configured, task: main
26631 note: configured to log to "/dev/stdout"
26632 Sep 22 23:21:42.160 INFO listening on 127.0.0.1:0, task: main
26633 Sep 22 23:21:42.160 INFO listening on 127.0.0.1:0, task: main
26634 Sep 22 23:21:42.160 INFO listening on 127.0.0.1:0, task: main
266352023-09-22T23:21:42.160ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:35139
266362023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): no entry exists for volume fc92b52b-99b5-4d79-ab62-675ee18fb434, constructing...
266372023-09-22T23:21:42.161ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:59371
266382023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Upstairs starts
266392023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
266402023-09-22T23:21:42.161ZINFOcrucible-pantry: listen IP: 127.0.0.1:59371
266412023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
266422023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 94d7a72a-673e-42de-882f-9dbeb845d1f4
266432023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Crucible 94d7a72a-673e-42de-882f-9dbeb845d1f4 has session id: 9d22304a-a5f9-4f90-8c1f-fec9500ab362
266442023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:40394 looper = 0
266452023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:44812 looper = 1
266462023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:50704 looper = 2
266472023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
266482023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
266492023-09-22T23:21:42.161ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
266502023-09-22T23:21:42.162ZINFOcrucible-pantry (datafile): volume fc92b52b-99b5-4d79-ab62-675ee18fb434 constructed ok
26651 The guest has requested activation
266522023-09-22T23:21:42.162ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 active request set
26653 Sep 22 23:21:42.162 INFO accepted connection from 127.0.0.1:60751, task: main
26654 Sep 22 23:21:42.162 INFO accepted connection from 127.0.0.1:55531, task: main
26655 Sep 22 23:21:42.162 INFO accepted connection from 127.0.0.1:40108, task: main
266562023-09-22T23:21:42.162ZINFOcrucible-pantry (datafile): [0] 94d7a72a-673e-42de-882f-9dbeb845d1f4 looper connected looper = 0
266572023-09-22T23:21:42.162ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:40394 in state New
266582023-09-22T23:21:42.162ZINFOcrucible-pantry (datafile): [1] 94d7a72a-673e-42de-882f-9dbeb845d1f4 looper connected looper = 1
266592023-09-22T23:21:42.162ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:44812 in state New
266602023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [2] 94d7a72a-673e-42de-882f-9dbeb845d1f4 looper connected looper = 2
266612023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:50704 in state New
26662 Sep 22 23:21:42.163 INFO Connection request from 94d7a72a-673e-42de-882f-9dbeb845d1f4 with version 4, task: proc
26663 Sep 22 23:21:42.163 INFO upstairs UpstairsConnection { upstairs_id: 94d7a72a-673e-42de-882f-9dbeb845d1f4, session_id: 34608033-2e41-41aa-a912-4128bf340849, gen: 1 } connected, version 4, task: proc
26664 Sep 22 23:21:42.163 INFO Connection request from 94d7a72a-673e-42de-882f-9dbeb845d1f4 with version 4, task: proc
26665 Sep 22 23:21:42.163 INFO upstairs UpstairsConnection { upstairs_id: 94d7a72a-673e-42de-882f-9dbeb845d1f4, session_id: 34608033-2e41-41aa-a912-4128bf340849, gen: 1 } connected, version 4, task: proc
26666 Sep 22 23:21:42.163 INFO Connection request from 94d7a72a-673e-42de-882f-9dbeb845d1f4 with version 4, task: proc
26667 Sep 22 23:21:42.163 INFO upstairs UpstairsConnection { upstairs_id: 94d7a72a-673e-42de-882f-9dbeb845d1f4, session_id: 34608033-2e41-41aa-a912-4128bf340849, gen: 1 } connected, version 4, task: proc
266682023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [0] 94d7a72a-673e-42de-882f-9dbeb845d1f4 (34608033-2e41-41aa-a912-4128bf340849) New New New ds_transition to WaitActive
266692023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
266702023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 34608033-2e41-41aa-a912-4128bf340849
266712023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [1] 94d7a72a-673e-42de-882f-9dbeb845d1f4 (34608033-2e41-41aa-a912-4128bf340849) WaitActive New New ds_transition to WaitActive
266722023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
266732023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 34608033-2e41-41aa-a912-4128bf340849
266742023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [2] 94d7a72a-673e-42de-882f-9dbeb845d1f4 (34608033-2e41-41aa-a912-4128bf340849) WaitActive WaitActive New ds_transition to WaitActive
26675 Sep 22 23:21:42.163 INFO UpstairsConnection { upstairs_id: 94d7a72a-673e-42de-882f-9dbeb845d1f4, session_id: 34608033-2e41-41aa-a912-4128bf340849, gen: 1 } is now active (read-write)
266762023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
266772023-09-22T23:21:42.163ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 34608033-2e41-41aa-a912-4128bf340849
26678 Sep 22 23:21:42.163 INFO UpstairsConnection { upstairs_id: 94d7a72a-673e-42de-882f-9dbeb845d1f4, session_id: 34608033-2e41-41aa-a912-4128bf340849, gen: 1 } is now active (read-write)
26679 Sep 22 23:21:42.164 INFO UpstairsConnection { upstairs_id: 94d7a72a-673e-42de-882f-9dbeb845d1f4, session_id: 34608033-2e41-41aa-a912-4128bf340849, gen: 1 } is now active (read-write)
266802023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:40394 has UUID 1dc6ff1a-e3e5-49bb-96af-2f8e2384da86
266812023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1dc6ff1a-e3e5-49bb-96af-2f8e2384da86, encrypted: true, database_read_version: 1, database_write_version: 1 }
266822023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 WaitActive WaitActive WaitActive
266832023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:44812 has UUID 6b825ea1-626e-494d-9a24-d66b39d14dbc
266842023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6b825ea1-626e-494d-9a24-d66b39d14dbc, encrypted: true, database_read_version: 1, database_write_version: 1 }
266852023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 WaitActive WaitActive WaitActive
266862023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:50704 has UUID 19cc35ab-e1e8-4b0e-9882-b11c9bb545fa
266872023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 19cc35ab-e1e8-4b0e-9882-b11c9bb545fa, encrypted: true, database_read_version: 1, database_write_version: 1 }
266882023-09-22T23:21:42.164ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 WaitActive WaitActive WaitActive
26689 Sep 22 23:21:42.164 INFO Current flush_numbers [0..12]: [0, 0]
26690 Sep 22 23:21:42.165 INFO Downstairs has completed Negotiation, task: proc
26691 Sep 22 23:21:42.165 INFO Current flush_numbers [0..12]: [0, 0]
26692 Sep 22 23:21:42.165 INFO Downstairs has completed Negotiation, task: proc
26693 Sep 22 23:21:42.165 INFO Current flush_numbers [0..12]: [0, 0]
26694 Sep 22 23:21:42.165 INFO Downstairs has completed Negotiation, task: proc
266952023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0] 94d7a72a-673e-42de-882f-9dbeb845d1f4 (34608033-2e41-41aa-a912-4128bf340849) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
266962023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
266972023-09-22T23:21:42.166ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
266982023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
266992023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [1] 94d7a72a-673e-42de-882f-9dbeb845d1f4 (34608033-2e41-41aa-a912-4128bf340849) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
267002023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
267012023-09-22T23:21:42.166ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
267022023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
267032023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [2] 94d7a72a-673e-42de-882f-9dbeb845d1f4 (34608033-2e41-41aa-a912-4128bf340849) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
267042023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
26705 The guest has finished waiting for activation
267062023-09-22T23:21:42.166ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
267072023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
267082023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:40394 task reports connection:true
267092023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 WaitQuorum WaitQuorum WaitQuorum
267102023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
267112023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
267122023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
267132023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
267142023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
267152023-09-22T23:21:42.166ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
267162023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
267172023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
267182023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
267192023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Max found gen is 1
267202023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
267212023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Next flush: 1
267222023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): All extents match
267232023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): No downstairs repair required
267242023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): No initial repair work was required
267252023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
267262023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 is now active with session: 34608033-2e41-41aa-a912-4128bf340849
267272023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 Set Active after no repair
267282023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
267292023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Set check for repair
267302023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:44812 task reports connection:true
267312023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 Active Active Active
267322023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Set check for repair
267332023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:50704 task reports connection:true
267342023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 Active Active Active
267352023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): Set check for repair
267362023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [0] received reconcile message
267372023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
267382023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
267392023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [1] received reconcile message
267402023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
267412023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
267422023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [2] received reconcile message
267432023-09-22T23:21:42.167ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
267442023-09-22T23:21:42.168ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
267452023-09-22T23:21:42.168ZINFOcrucible-pantry (datafile): volume fc92b52b-99b5-4d79-ab62-675ee18fb434 activated ok
267462023-09-22T23:21:42.168ZINFOcrucible-pantry (datafile): volume fc92b52b-99b5-4d79-ab62-675ee18fb434 constructed and inserted ok
267472023-09-22T23:21:42.168ZINFOcrucible-pantry (dropshot): request completed latency_us = 6199 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:35139 req_id = 189f22d2-19b5-44f2-993a-6c0d67bfb933 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434
267482023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:60149
267492023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:62526
267502023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:63083
267512023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:46607
267522023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:34605
267532023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:64643
267542023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:45431
267552023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:54209
267562023-09-22T23:21:42.172ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:63226 remote_addr = 127.0.0.1:49934
267572023-09-22T23:21:42.174ZINFOcrucible-pantry (dropshot): request completed latency_us = 3302 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:35139 req_id = 6892ecaf-2aa0-45f9-aaf5-7dc549bacc4e response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
26758 Sep 22 23:21:42.178 DEBG Write :1000 deps:[] res:true
26759 Sep 22 23:21:42.178 DEBG Write :1000 deps:[] res:true
26760 Sep 22 23:21:42.179 DEBG Write :1000 deps:[] res:true
26761 Sep 22 23:21:42.182 INFO listening on 127.0.0.1:0, task: main
26762 Sep 22 23:21:42.182 INFO listening on 127.0.0.1:0, task: main
26763 Sep 22 23:21:42.182 INFO listening on 127.0.0.1:0, task: main
267642023-09-22T23:21:42.182ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:33810 remote_addr = 127.0.0.1:40528
267652023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): no entry exists for volume 715e89c9-d21c-47a9-83e7-53d748a8ba52, constructing...
267662023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Upstairs starts
267672023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
267682023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
267692023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: fb7bddce-e41d-4045-a067-a43fc478d6a6
267702023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Crucible fb7bddce-e41d-4045-a067-a43fc478d6a6 has session id: d07d0fc9-f7ce-4e03-abe9-08b4f5157d34
267712023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:42682 looper = 0
267722023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:61724 looper = 1
267732023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:47055 looper = 2
267742023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
267752023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
267762023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
267772023-09-22T23:21:42.183ZINFOcrucible-pantry (datafile): volume 715e89c9-d21c-47a9-83e7-53d748a8ba52 constructed ok
26778 The guest has requested activation
267792023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 active request set
26780 Sep 22 23:21:42.184 INFO accepted connection from 127.0.0.1:51117, task: main
26781 Sep 22 23:21:42.184 INFO accepted connection from 127.0.0.1:58664, task: main
26782 Sep 22 23:21:42.184 INFO accepted connection from 127.0.0.1:48821, task: main
267832023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): [0] fb7bddce-e41d-4045-a067-a43fc478d6a6 looper connected looper = 0
267842023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:42682 in state New
267852023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): [1] fb7bddce-e41d-4045-a067-a43fc478d6a6 looper connected looper = 1
267862023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:61724 in state New
267872023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): [2] fb7bddce-e41d-4045-a067-a43fc478d6a6 looper connected looper = 2
267882023-09-22T23:21:42.184ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:47055 in state New
26789 Sep 22 23:21:42.184 INFO Connection request from fb7bddce-e41d-4045-a067-a43fc478d6a6 with version 4, task: proc
26790 Sep 22 23:21:42.184 INFO upstairs UpstairsConnection { upstairs_id: fb7bddce-e41d-4045-a067-a43fc478d6a6, session_id: 18c3a639-2993-44be-a553-3c3d4014cfe6, gen: 1 } connected, version 4, task: proc
26791 Sep 22 23:21:42.184 INFO Connection request from fb7bddce-e41d-4045-a067-a43fc478d6a6 with version 4, task: proc
26792 Sep 22 23:21:42.184 INFO upstairs UpstairsConnection { upstairs_id: fb7bddce-e41d-4045-a067-a43fc478d6a6, session_id: 18c3a639-2993-44be-a553-3c3d4014cfe6, gen: 1 } connected, version 4, task: proc
26793 Sep 22 23:21:42.184 INFO Connection request from fb7bddce-e41d-4045-a067-a43fc478d6a6 with version 4, task: proc
26794 Sep 22 23:21:42.184 INFO upstairs UpstairsConnection { upstairs_id: fb7bddce-e41d-4045-a067-a43fc478d6a6, session_id: 18c3a639-2993-44be-a553-3c3d4014cfe6, gen: 1 } connected, version 4, task: proc
267952023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [0] fb7bddce-e41d-4045-a067-a43fc478d6a6 (18c3a639-2993-44be-a553-3c3d4014cfe6) New New New ds_transition to WaitActive
267962023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
267972023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 18c3a639-2993-44be-a553-3c3d4014cfe6
267982023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [1] fb7bddce-e41d-4045-a067-a43fc478d6a6 (18c3a639-2993-44be-a553-3c3d4014cfe6) WaitActive New New ds_transition to WaitActive
26799 Sep 22 23:21:42.185 INFO UpstairsConnection { upstairs_id: fb7bddce-e41d-4045-a067-a43fc478d6a6, session_id: 18c3a639-2993-44be-a553-3c3d4014cfe6, gen: 1 } is now active (read-write)
268002023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
268012023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 18c3a639-2993-44be-a553-3c3d4014cfe6
26802 Sep 22 23:21:42.185 INFO UpstairsConnection { upstairs_id: fb7bddce-e41d-4045-a067-a43fc478d6a6, session_id: 18c3a639-2993-44be-a553-3c3d4014cfe6, gen: 1 } is now active (read-write)
268032023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [2] fb7bddce-e41d-4045-a067-a43fc478d6a6 (18c3a639-2993-44be-a553-3c3d4014cfe6) WaitActive WaitActive New ds_transition to WaitActive
268042023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
26805 Sep 22 23:21:42.185 INFO UpstairsConnection { upstairs_id: fb7bddce-e41d-4045-a067-a43fc478d6a6, session_id: 18c3a639-2993-44be-a553-3c3d4014cfe6, gen: 1 } is now active (read-write)
268062023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 18c3a639-2993-44be-a553-3c3d4014cfe6
268072023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:42682 has UUID 981e485a-3d34-445d-9adb-f9e13ff6f329
268082023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 981e485a-3d34-445d-9adb-f9e13ff6f329, encrypted: true, database_read_version: 1, database_write_version: 1 }
268092023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 WaitActive WaitActive WaitActive
268102023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:61724 has UUID c77d45f9-2dd1-4698-958d-b4ab9ce4ffee
268112023-09-22T23:21:42.185ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c77d45f9-2dd1-4698-958d-b4ab9ce4ffee, encrypted: true, database_read_version: 1, database_write_version: 1 }
268122023-09-22T23:21:42.186ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 WaitActive WaitActive WaitActive
268132023-09-22T23:21:42.186ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:47055 has UUID 300935ac-634a-4338-b182-cfe66d8d8948
26814 Sep 22 23:21:42.186 INFO Current flush_numbers [0..12]: [0, 0]
268152023-09-22T23:21:42.186ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 300935ac-634a-4338-b182-cfe66d8d8948, encrypted: true, database_read_version: 1, database_write_version: 1 }
268162023-09-22T23:21:42.186ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 WaitActive WaitActive WaitActive
26817 Sep 22 23:21:42.186 INFO Downstairs has completed Negotiation, task: proc
26818 Sep 22 23:21:42.186 INFO Current flush_numbers [0..12]: [0, 0]
26819 Sep 22 23:21:42.186 INFO Downstairs has completed Negotiation, task: proc
26820 Sep 22 23:21:42.186 INFO Current flush_numbers [0..12]: [0, 0]
26821 Sep 22 23:21:42.186 INFO Downstairs has completed Negotiation, task: proc
268222023-09-22T23:21:42.186ZINFOcrucible-pantry (datafile): [0] fb7bddce-e41d-4045-a067-a43fc478d6a6 (18c3a639-2993-44be-a553-3c3d4014cfe6) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
268232023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
268242023-09-22T23:21:42.187ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
268252023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
268262023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [1] fb7bddce-e41d-4045-a067-a43fc478d6a6 (18c3a639-2993-44be-a553-3c3d4014cfe6) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
268272023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
268282023-09-22T23:21:42.187ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
26829 The guest has finished waiting for activation
268302023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
268312023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [2] fb7bddce-e41d-4045-a067-a43fc478d6a6 (18c3a639-2993-44be-a553-3c3d4014cfe6) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
268322023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
268332023-09-22T23:21:42.187ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
268342023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 12066 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:60149 req_id = ec612fd3-8be0-4cf9-ba1a-a365f863a0fb response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268352023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
268362023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:42682 task reports connection:true
268372023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11958 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:62526 req_id = a8175294-6798-4939-95a3-c489b8ea8777 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268382023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 WaitQuorum WaitQuorum WaitQuorum
268392023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
268402023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11834 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:63083 req_id = 3fdc08ef-c794-4d68-80e5-2f48fde6eea3 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268412023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
268422023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
268432023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11693 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:46607 req_id = 725d82f8-1008-4758-aea4-f4dfdc20eaa1 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268442023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
268452023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
268462023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11568 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:34605 req_id = e76c3b6d-894f-4b35-bfe7-c38f50c55fa0 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268472023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
268482023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
268492023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11427 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:64643 req_id = 98a63973-7c40-48cb-80ed-273c32854cb5 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268502023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
268512023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
268522023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11273 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:45431 req_id = 35b96d57-f7ae-4ed6-acc2-b15ef0a2c492 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268532023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): Max found gen is 1
268542023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
268552023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 11116 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:54209 req_id = 0438b2f4-a47b-4b9b-8901-7025dcbcdae8 response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268562023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): Next flush: 1
268572023-09-22T23:21:42.187ZINFOcrucible-pantry (datafile): All extents match
268582023-09-22T23:21:42.187ZINFOcrucible-pantry (dropshot): request completed latency_us = 10989 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 6517deb5-b551-4e0c-b9cf-cca9a7c97eae response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_write
268592023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): No downstairs repair required
268602023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): No initial repair work was required
268612023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
268622023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 is now active with session: 18c3a639-2993-44be-a553-3c3d4014cfe6
26863 Sep 22 23:21:42.188 DEBG Write :1001 deps:[] res:true
268642023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 Set Active after no repair
268652023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
268662023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): Set check for repair
268672023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:61724 task reports connection:true
26868 Sep 22 23:21:42.188 DEBG Write :1002 deps:[] res:true
268692023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 Active Active Active
268702023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): Set check for repair
268712023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:47055 task reports connection:true
268722023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 Active Active Active
268732023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): Set check for repair
268742023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [0] received reconcile message
26875 Sep 22 23:21:42.188 DEBG Write :1003 deps:[] res:true
268762023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
268772023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
268782023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [1] received reconcile message
268792023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
268802023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
268812023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [2] received reconcile message
26882 Sep 22 23:21:42.188 DEBG Write :1004 deps:[] res:true
268832023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
268842023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
268852023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): volume 715e89c9-d21c-47a9-83e7-53d748a8ba52 activated ok
268862023-09-22T23:21:42.188ZINFOcrucible-pantry (datafile): volume 715e89c9-d21c-47a9-83e7-53d748a8ba52 constructed and inserted ok
268872023-09-22T23:21:42.188ZINFOcrucible-pantry (dropshot): request completed latency_us = 4874 local_addr = 127.0.0.1:33810 method = POST remote_addr = 127.0.0.1:40528 req_id = 5d552db6-e614-4568-880f-9486fd8ee34d response_code = 200 uri = /crucible/pantry/0/volume/715e89c9-d21c-47a9-83e7-53d748a8ba52
268882023-09-22T23:21:42.189ZINFOcrucible-pantry (dropshot): request completed latency_us = 237 local_addr = 127.0.0.1:33810 method = POST remote_addr = 127.0.0.1:40528 req_id = 62cda30e-1500-4fa5-8745-9d8a4919e931 response_code = 200 uri = /crucible/pantry/0/volume/715e89c9-d21c-47a9-83e7-53d748a8ba52/validate
26889 Sep 22 23:21:42.189 DEBG Write :1005 deps:[] res:true
26890 Sep 22 23:21:42.189 DEBG Write :1006 deps:[] res:true
268912023-09-22T23:21:42.189ZINFOcrucible-pantry (dropshot): request completed latency_us = 263 local_addr = 127.0.0.1:33810 method = GET remote_addr = 127.0.0.1:40528 req_id = 85c1eddb-a341-49f3-aeb1-3b39e9fbd3b8 response_code = 200 uri = /crucible/pantry/0/job/6a3c7f62-15b7-45cc-a4cb-0883cbca910e/is_finished
26892 Sep 22 23:21:42.189 DEBG Write :1007 deps:[] res:true
26893 Sep 22 23:21:42.189 DEBG Write :1008 deps:[] res:true
268942023-09-22T23:21:42.189ZERROcrucible-pantry (datafile): job 6a3c7f62-15b7-45cc-a4cb-0883cbca910e failed with size to validate 100 not divisible by block size 512!
26895 Sep 22 23:21:42.189 DEBG Write :1009 deps:[] res:true
268962023-09-22T23:21:42.190ZINFOcrucible-pantry (dropshot): request completed latency_us = 207 local_addr = 127.0.0.1:33810 method = GET remote_addr = 127.0.0.1:40528 req_id = cf48edb4-754a-4bb7-b4a1-fc11c9b7ce6a response_code = 200 uri = /crucible/pantry/0/job/6a3c7f62-15b7-45cc-a4cb-0883cbca910e/ok
268972023-09-22T23:21:42.190ZINFOcrucible-pantry (datafile): detach removing entry for volume 715e89c9-d21c-47a9-83e7-53d748a8ba52
268982023-09-22T23:21:42.190ZINFOcrucible-pantry (datafile): detaching volume 715e89c9-d21c-47a9-83e7-53d748a8ba52
26899 Sep 22 23:21:42.191 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
26900 Sep 22 23:21:42.191 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
26901 Sep 22 23:21:42.191 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
269022023-09-22T23:21:42.191ZINFOcrucible-pantry (datafile): Request to deactivate this guest
269032023-09-22T23:21:42.191ZINFOcrucible-pantry (datafile): fb7bddce-e41d-4045-a067-a43fc478d6a6 set deactivating.
269042023-09-22T23:21:42.191ZINFOcrucible-pantry (dropshot): request completed latency_us = 1309 local_addr = 127.0.0.1:33810 method = DELETE remote_addr = 127.0.0.1:40528 req_id = 189a4809-c0c9-496a-8105-12b798a256ca response_code = 204 uri = /crucible/pantry/0/volume/715e89c9-d21c-47a9-83e7-53d748a8ba52
26905 Sep 22 23:21:42.193 DEBG Write :1001 deps:[] res:true
26906 Sep 22 23:21:42.193 DEBG Write :1002 deps:[] res:true
26907 Sep 22 23:21:42.193 DEBG Write :1003 deps:[] res:true
26908 Sep 22 23:21:42.193 DEBG Write :1004 deps:[] res:true
26909 Sep 22 23:21:42.194 DEBG Write :1005 deps:[] res:true
26910 Sep 22 23:21:42.194 DEBG Write :1006 deps:[] res:true
26911 Sep 22 23:21:42.194 DEBG Write :1007 deps:[] res:true
26912 Sep 22 23:21:42.195 DEBG Write :1008 deps:[] res:true
26913 Sep 22 23:21:42.195 DEBG Write :1009 deps:[] res:true
26914 Sep 22 23:21:42.195 DEBG Write :1001 deps:[] res:true
26915 Sep 22 23:21:42.195 DEBG Write :1002 deps:[] res:true
26916 Sep 22 23:21:42.196 DEBG Write :1003 deps:[] res:true
26917 test test::test_pantry_validate_fail ... ok
26918 Sep 22 23:21:42.196 DEBG Write :1004 deps:[] res:true
26919 Sep 22 23:21:42.196 INFO test_volume_replace of a volume
26920 Sep 22 23:21:42.196 INFO current number of open files limit 65536 is already the maximum
26921 Sep 22 23:21:42.196 INFO Created new region file "/tmp/downstairs-KHkahjo0/region.json"
26922 Sep 22 23:21:42.196 DEBG Write :1005 deps:[] res:true
26923 Sep 22 23:21:42.197 DEBG Write :1006 deps:[] res:true
26924 Sep 22 23:21:42.197 DEBG Write :1007 deps:[] res:true
26925 Sep 22 23:21:42.197 DEBG Write :1008 deps:[] res:true
26926 Sep 22 23:21:42.197 DEBG IO Write 1000 has deps []
26927 Sep 22 23:21:42.197 DEBG Write :1009 deps:[] res:true
26928 Sep 22 23:21:42.197 DEBG IO Flush 1001 has deps [JobId(1000)]
26929 Sep 22 23:21:42.198 DEBG up_ds_listen was notified
26930 Sep 22 23:21:42.198 DEBG up_ds_listen process 1000
26931 Sep 22 23:21:42.198 DEBG [A] ack job 1000:1, : downstairs
26932 Sep 22 23:21:42.198 DEBG up_ds_listen checked 1 jobs, back to waiting
26933 Sep 22 23:21:42.199 INFO current number of open files limit 65536 is already the maximum
26934 Sep 22 23:21:42.199 INFO Opened existing region file "/tmp/downstairs-KHkahjo0/region.json"
26935 Sep 22 23:21:42.199 INFO Database read version 1
26936 Sep 22 23:21:42.199 INFO Database write version 1
26937 Sep 22 23:21:42.200 DEBG Read :1010 deps:[JobId(1000)] res:true
26938 Sep 22 23:21:42.200 DEBG Read :1010 deps:[JobId(1000)] res:true
26939 Sep 22 23:21:42.200 INFO UUID: 502a3035-412d-4e19-aa17-914c9ad2cb18
26940 Sep 22 23:21:42.200 INFO Blocks per extent:5 Total Extents: 2
26941 Sep 22 23:21:42.200 DEBG Read :1010 deps:[JobId(1000)] res:true
26942 Sep 22 23:21:42.200 INFO Crucible Version: Crucible Version: 0.0.1
26943 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26944 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26945 rustc: 1.70.0 stable x86_64-unknown-illumos
26946 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26947 Sep 22 23:21:42.200 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26948 Sep 22 23:21:42.200 INFO Using address: 127.0.0.1:54814, task: main
26949 Sep 22 23:21:42.201 INFO Repair listens on 127.0.0.1:0, task: repair
26950 Sep 22 23:21:42.201 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48782, task: repair
26951 Sep 22 23:21:42.201 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48782, task: repair
26952 Sep 22 23:21:42.201 INFO listening, local_addr: 127.0.0.1:48782, task: repair
26953 Sep 22 23:21:42.201 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48782, task: repair
26954 Sep 22 23:21:42.201 INFO Using repair address: 127.0.0.1:48782, task: main
26955 Sep 22 23:21:42.201 INFO No SSL acceptor configured, task: main
26956 Sep 22 23:21:42.201 INFO current number of open files limit 65536 is already the maximum
26957 Sep 22 23:21:42.201 INFO Created new region file "/tmp/downstairs-PQIky7CE/region.json"
269582023-09-22T23:21:42.202ZINFOcrucible-pantry (dropshot): request completed latency_us = 3736 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 4a927f49-cc52-4f9b-a198-e5a53914618d response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
26959 Sep 22 23:21:42.204 DEBG Read :1011 deps:[JobId(1001)] res:true
26960 Sep 22 23:21:42.204 INFO current number of open files limit 65536 is already the maximum
26961 Sep 22 23:21:42.204 INFO Opened existing region file "/tmp/downstairs-PQIky7CE/region.json"
26962 Sep 22 23:21:42.204 INFO Database read version 1
26963 Sep 22 23:21:42.204 INFO Database write version 1
26964 Sep 22 23:21:42.205 DEBG Read :1011 deps:[JobId(1001)] res:true
26965 Sep 22 23:21:42.205 DEBG Read :1011 deps:[JobId(1001)] res:true
26966 Sep 22 23:21:42.205 INFO UUID: e3e919ab-aa67-4c3a-95f0-261ab63bbc0d
26967 Sep 22 23:21:42.205 INFO Blocks per extent:5 Total Extents: 2
26968 Sep 22 23:21:42.205 INFO Crucible Version: Crucible Version: 0.0.1
26969 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26970 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26971 rustc: 1.70.0 stable x86_64-unknown-illumos
26972 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26973 Sep 22 23:21:42.205 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26974 Sep 22 23:21:42.205 INFO Using address: 127.0.0.1:38745, task: main
26975 Sep 22 23:21:42.206 INFO Repair listens on 127.0.0.1:0, task: repair
26976 Sep 22 23:21:42.206 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:34887, task: repair
26977 Sep 22 23:21:42.206 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:34887, task: repair
26978 Sep 22 23:21:42.206 INFO listening, local_addr: 127.0.0.1:34887, task: repair
26979 Sep 22 23:21:42.206 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:34887, task: repair
26980 Sep 22 23:21:42.206 INFO Using repair address: 127.0.0.1:34887, task: main
26981 Sep 22 23:21:42.206 INFO No SSL acceptor configured, task: main
26982 Sep 22 23:21:42.206 INFO current number of open files limit 65536 is already the maximum
26983 Sep 22 23:21:42.206 INFO Created new region file "/tmp/downstairs-1gPjNQ1p/region.json"
269842023-09-22T23:21:42.207ZINFOcrucible-pantry (dropshot): request completed latency_us = 3543 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 35d58403-e2df-4252-9c72-95bab026aa9d response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
26985 Sep 22 23:21:42.209 DEBG Read :1012 deps:[JobId(1002)] res:true
26986 Sep 22 23:21:42.209 DEBG Read :1012 deps:[JobId(1002)] res:true
26987 Sep 22 23:21:42.209 DEBG Read :1012 deps:[JobId(1002)] res:true
26988 Sep 22 23:21:42.209 INFO current number of open files limit 65536 is already the maximum
26989 Sep 22 23:21:42.210 INFO Opened existing region file "/tmp/downstairs-1gPjNQ1p/region.json"
26990 Sep 22 23:21:42.210 INFO Database read version 1
26991 Sep 22 23:21:42.210 INFO Database write version 1
26992 Sep 22 23:21:42.210 INFO UUID: 893fcb30-a534-4c9f-9b0b-98301d815098
26993 Sep 22 23:21:42.210 INFO Blocks per extent:5 Total Extents: 2
26994 Sep 22 23:21:42.210 INFO Crucible Version: Crucible Version: 0.0.1
26995 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26996 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26997 rustc: 1.70.0 stable x86_64-unknown-illumos
26998 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26999 Sep 22 23:21:42.210 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27000 Sep 22 23:21:42.211 INFO Using address: 127.0.0.1:41243, task: main
27001 Sep 22 23:21:42.211 INFO Repair listens on 127.0.0.1:0, task: repair
27002 Sep 22 23:21:42.211 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58505, task: repair
27003 Sep 22 23:21:42.211 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58505, task: repair
27004 Sep 22 23:21:42.211 INFO listening, local_addr: 127.0.0.1:58505, task: repair
270052023-09-22T23:21:42.211ZINFOcrucible-pantry (dropshot): request completed latency_us = 3682 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = f593d9ca-b621-414d-9f96-7e3a565f6e48 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27006 Sep 22 23:21:42.211 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58505, task: repair
27007 Sep 22 23:21:42.211 INFO Using repair address: 127.0.0.1:58505, task: main
27008 Sep 22 23:21:42.211 INFO No SSL acceptor configured, task: main
27009 Sep 22 23:21:42.212 INFO Upstairs starts
27010 Sep 22 23:21:42.212 INFO Crucible Version: BuildInfo {
27011 version: "0.0.1",
27012 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27013 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27014 git_branch: "main",
27015 rustc_semver: "1.70.0",
27016 rustc_channel: "stable",
27017 rustc_host_triple: "x86_64-unknown-illumos",
27018 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27019 cargo_triple: "x86_64-unknown-illumos",
27020 debug: true,
27021 opt_level: 0,
27022 }
27023 Sep 22 23:21:42.212 INFO Upstairs <-> Downstairs Message Version: 4
27024 Sep 22 23:21:42.212 INFO Crucible stats registered with UUID: ad483994-9168-4721-bea0-26e15aed46eb
27025 Sep 22 23:21:42.212 INFO Crucible ad483994-9168-4721-bea0-26e15aed46eb has session id: 7829b344-35e9-4075-8db0-a4e3ccd0147f
27026 Sep 22 23:21:42.212 INFO listening on 127.0.0.1:0, task: main
27027 Sep 22 23:21:42.212 INFO listening on 127.0.0.1:0, task: main
27028 Sep 22 23:21:42.212 INFO listening on 127.0.0.1:0, task: main
27029 Sep 22 23:21:42.212 INFO [0] connecting to 127.0.0.1:54814, looper: 0
27030 Sep 22 23:21:42.212 INFO [1] connecting to 127.0.0.1:38745, looper: 1
27031 Sep 22 23:21:42.212 INFO [2] connecting to 127.0.0.1:41243, looper: 2
27032 Sep 22 23:21:42.212 INFO up_listen starts, task: up_listen
27033 Sep 22 23:21:42.212 INFO Wait for all three downstairs to come online
27034 Sep 22 23:21:42.213 INFO Flush timeout: 0.5
27035 Sep 22 23:21:42.213 INFO [0] ad483994-9168-4721-bea0-26e15aed46eb looper connected, looper: 0
27036 Sep 22 23:21:42.213 INFO [0] Proc runs for 127.0.0.1:54814 in state New
27037 Sep 22 23:21:42.213 INFO accepted connection from 127.0.0.1:55920, task: main
27038 Sep 22 23:21:42.213 INFO [1] ad483994-9168-4721-bea0-26e15aed46eb looper connected, looper: 1
27039 Sep 22 23:21:42.213 INFO [1] Proc runs for 127.0.0.1:38745 in state New
27040 Sep 22 23:21:42.213 DEBG Read :1013 deps:[JobId(1003)] res:true
27041 Sep 22 23:21:42.213 INFO accepted connection from 127.0.0.1:37381, task: main
27042 Sep 22 23:21:42.213 INFO [2] ad483994-9168-4721-bea0-26e15aed46eb looper connected, looper: 2
27043 Sep 22 23:21:42.213 INFO [2] Proc runs for 127.0.0.1:41243 in state New
27044 Sep 22 23:21:42.213 INFO accepted connection from 127.0.0.1:37150, task: main
27045 Sep 22 23:21:42.213 DEBG Read :1013 deps:[JobId(1003)] res:true
27046 Sep 22 23:21:42.213 DEBG Read :1013 deps:[JobId(1003)] res:true
27047 Sep 22 23:21:42.213 INFO Connection request from ad483994-9168-4721-bea0-26e15aed46eb with version 4, task: proc
27048 Sep 22 23:21:42.213 INFO upstairs UpstairsConnection { upstairs_id: ad483994-9168-4721-bea0-26e15aed46eb, session_id: d1488151-3064-4eea-a301-ac082b13982b, gen: 2 } connected, version 4, task: proc
27049 Sep 22 23:21:42.214 INFO Connection request from ad483994-9168-4721-bea0-26e15aed46eb with version 4, task: proc
27050 Sep 22 23:21:42.214 INFO upstairs UpstairsConnection { upstairs_id: ad483994-9168-4721-bea0-26e15aed46eb, session_id: d1488151-3064-4eea-a301-ac082b13982b, gen: 2 } connected, version 4, task: proc
27051 Sep 22 23:21:42.214 INFO Connection request from ad483994-9168-4721-bea0-26e15aed46eb with version 4, task: proc
27052 Sep 22 23:21:42.214 INFO upstairs UpstairsConnection { upstairs_id: ad483994-9168-4721-bea0-26e15aed46eb, session_id: d1488151-3064-4eea-a301-ac082b13982b, gen: 2 } connected, version 4, task: proc
27053 Sep 22 23:21:42.214 INFO [0] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) New New New ds_transition to WaitActive
27054 Sep 22 23:21:42.214 INFO [0] Transition from New to WaitActive
27055 Sep 22 23:21:42.214 INFO [1] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) WaitActive New New ds_transition to WaitActive
27056 Sep 22 23:21:42.214 INFO [1] Transition from New to WaitActive
27057 Sep 22 23:21:42.214 INFO [2] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) WaitActive WaitActive New ds_transition to WaitActive
27058 Sep 22 23:21:42.214 INFO [2] Transition from New to WaitActive
27059 The guest has requested activation
27060 Sep 22 23:21:42.214 INFO ad483994-9168-4721-bea0-26e15aed46eb active request set
27061 Sep 22 23:21:42.214 INFO [0] received activate with gen 2
27062 Sep 22 23:21:42.214 INFO [0] client got ds_active_rx, promote! session d1488151-3064-4eea-a301-ac082b13982b
27063 Sep 22 23:21:42.214 INFO [1] received activate with gen 2
27064 Sep 22 23:21:42.214 INFO [1] client got ds_active_rx, promote! session d1488151-3064-4eea-a301-ac082b13982b
27065 Sep 22 23:21:42.214 INFO [2] received activate with gen 2
27066 Sep 22 23:21:42.214 INFO [2] client got ds_active_rx, promote! session d1488151-3064-4eea-a301-ac082b13982b
27067 Sep 22 23:21:42.215 INFO UpstairsConnection { upstairs_id: ad483994-9168-4721-bea0-26e15aed46eb, session_id: d1488151-3064-4eea-a301-ac082b13982b, gen: 2 } is now active (read-write)
27068 Sep 22 23:21:42.215 INFO UpstairsConnection { upstairs_id: ad483994-9168-4721-bea0-26e15aed46eb, session_id: d1488151-3064-4eea-a301-ac082b13982b, gen: 2 } is now active (read-write)
27069 Sep 22 23:21:42.215 INFO UpstairsConnection { upstairs_id: ad483994-9168-4721-bea0-26e15aed46eb, session_id: d1488151-3064-4eea-a301-ac082b13982b, gen: 2 } is now active (read-write)
27070 Sep 22 23:21:42.215 INFO [0] downstairs client at 127.0.0.1:54814 has UUID 502a3035-412d-4e19-aa17-914c9ad2cb18
27071 Sep 22 23:21:42.215 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 502a3035-412d-4e19-aa17-914c9ad2cb18, encrypted: true, database_read_version: 1, database_write_version: 1 }
27072 Sep 22 23:21:42.215 INFO ad483994-9168-4721-bea0-26e15aed46eb WaitActive WaitActive WaitActive
27073 Sep 22 23:21:42.215 INFO [1] downstairs client at 127.0.0.1:38745 has UUID e3e919ab-aa67-4c3a-95f0-261ab63bbc0d
27074 Sep 22 23:21:42.215 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e3e919ab-aa67-4c3a-95f0-261ab63bbc0d, encrypted: true, database_read_version: 1, database_write_version: 1 }
270752023-09-22T23:21:42.215ZINFOcrucible-pantry (dropshot): request completed latency_us = 3592 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 54450ad8-664d-442c-b8d7-98e498a485d4 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27076 Sep 22 23:21:42.216 INFO ad483994-9168-4721-bea0-26e15aed46eb WaitActive WaitActive WaitActive
27077 Sep 22 23:21:42.216 INFO [2] downstairs client at 127.0.0.1:41243 has UUID 893fcb30-a534-4c9f-9b0b-98301d815098
27078 Sep 22 23:21:42.216 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 893fcb30-a534-4c9f-9b0b-98301d815098, encrypted: true, database_read_version: 1, database_write_version: 1 }
27079 Sep 22 23:21:42.216 INFO ad483994-9168-4721-bea0-26e15aed46eb WaitActive WaitActive WaitActive
27080 Sep 22 23:21:42.216 INFO Current flush_numbers [0..12]: [0, 0]
27081 Sep 22 23:21:42.216 INFO Downstairs has completed Negotiation, task: proc
27082 Sep 22 23:21:42.216 INFO Current flush_numbers [0..12]: [0, 0]
27083 Sep 22 23:21:42.217 INFO Downstairs has completed Negotiation, task: proc
27084 Sep 22 23:21:42.217 INFO Current flush_numbers [0..12]: [0, 0]
27085 Sep 22 23:21:42.217 INFO Downstairs has completed Negotiation, task: proc
27086 Sep 22 23:21:42.217 INFO [0] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
27087 Sep 22 23:21:42.217 INFO [0] Transition from WaitActive to WaitQuorum
27088 Sep 22 23:21:42.217 WARN [0] new RM replaced this: None
27089 Sep 22 23:21:42.217 INFO [0] Starts reconcile loop
27090 Sep 22 23:21:42.217 DEBG Read :1014 deps:[JobId(1004)] res:true
27091 Sep 22 23:21:42.217 INFO [1] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
27092 Sep 22 23:21:42.217 INFO [1] Transition from WaitActive to WaitQuorum
27093 Sep 22 23:21:42.217 WARN [1] new RM replaced this: None
27094 Sep 22 23:21:42.217 INFO [1] Starts reconcile loop
27095 Sep 22 23:21:42.218 DEBG Read :1014 deps:[JobId(1004)] res:true
27096 Sep 22 23:21:42.218 INFO [2] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
27097 Sep 22 23:21:42.218 INFO [2] Transition from WaitActive to WaitQuorum
27098 Sep 22 23:21:42.218 WARN [2] new RM replaced this: None
27099 Sep 22 23:21:42.218 INFO [2] Starts reconcile loop
27100 Sep 22 23:21:42.218 INFO [0] 127.0.0.1:54814 task reports connection:true
27101 Sep 22 23:21:42.218 INFO ad483994-9168-4721-bea0-26e15aed46eb WaitQuorum WaitQuorum WaitQuorum
27102 Sep 22 23:21:42.218 DEBG Read :1014 deps:[JobId(1004)] res:true
27103 Sep 22 23:21:42.218 INFO [0]R flush_numbers: [0, 0]
27104 Sep 22 23:21:42.218 INFO [0]R generation: [0, 0]
27105 Sep 22 23:21:42.218 INFO [0]R dirty: [false, false]
27106 Sep 22 23:21:42.218 INFO [1]R flush_numbers: [0, 0]
27107 Sep 22 23:21:42.218 INFO [1]R generation: [0, 0]
27108 Sep 22 23:21:42.218 INFO [1]R dirty: [false, false]
27109 Sep 22 23:21:42.218 INFO [2]R flush_numbers: [0, 0]
27110 Sep 22 23:21:42.218 INFO [2]R generation: [0, 0]
27111 Sep 22 23:21:42.218 INFO [2]R dirty: [false, false]
27112 Sep 22 23:21:42.218 INFO Max found gen is 1
27113 Sep 22 23:21:42.218 INFO Generation requested: 2 >= found:1
27114 Sep 22 23:21:42.218 INFO Next flush: 1
27115 Sep 22 23:21:42.218 INFO All extents match
27116 Sep 22 23:21:42.218 INFO No downstairs repair required
27117 Sep 22 23:21:42.218 INFO No initial repair work was required
27118 Sep 22 23:21:42.218 INFO Set Downstairs and Upstairs active
27119 Sep 22 23:21:42.218 INFO ad483994-9168-4721-bea0-26e15aed46eb is now active with session: d1488151-3064-4eea-a301-ac082b13982b
27120 Sep 22 23:21:42.218 INFO ad483994-9168-4721-bea0-26e15aed46eb Set Active after no repair
27121 Sep 22 23:21:42.218 INFO Notify all downstairs, region set compare is done.
27122 Sep 22 23:21:42.218 INFO Set check for repair
27123 Sep 22 23:21:42.218 INFO [1] 127.0.0.1:38745 task reports connection:true
27124 Sep 22 23:21:42.218 INFO ad483994-9168-4721-bea0-26e15aed46eb Active Active Active
27125 Sep 22 23:21:42.218 INFO Set check for repair
27126 Sep 22 23:21:42.218 INFO [2] 127.0.0.1:41243 task reports connection:true
27127 Sep 22 23:21:42.218 INFO ad483994-9168-4721-bea0-26e15aed46eb Active Active Active
27128 Sep 22 23:21:42.218 INFO Set check for repair
27129 Sep 22 23:21:42.218 INFO [0] received reconcile message
27130 Sep 22 23:21:42.218 INFO [0] All repairs completed, exit
27131 Sep 22 23:21:42.218 INFO [0] Starts cmd_loop
27132 Sep 22 23:21:42.218 INFO [1] received reconcile message
27133 Sep 22 23:21:42.218 INFO [1] All repairs completed, exit
27134 Sep 22 23:21:42.219 INFO [1] Starts cmd_loop
27135 Sep 22 23:21:42.219 INFO [2] received reconcile message
27136 Sep 22 23:21:42.219 INFO [2] All repairs completed, exit
27137 Sep 22 23:21:42.219 INFO [2] Starts cmd_loop
27138 The guest has finished waiting for activation
271392023-09-22T23:21:42.220ZINFOcrucible-pantry (dropshot): request completed latency_us = 3488 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = d097b876-3f26-4331-853c-4e48326b1ca4 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27140 Sep 22 23:21:42.222 DEBG Read :1015 deps:[JobId(1005)] res:true
27141 Sep 22 23:21:42.222 DEBG IO Write 1000 has deps []
27142 Sep 22 23:21:42.222 DEBG Read :1015 deps:[JobId(1005)] res:true
27143 Sep 22 23:21:42.222 DEBG up_ds_listen was notified
27144 Sep 22 23:21:42.222 DEBG up_ds_listen process 1000
27145 Sep 22 23:21:42.222 DEBG [A] ack job 1000:1, : downstairs
27146 Sep 22 23:21:42.222 DEBG up_ds_listen checked 1 jobs, back to waiting
27147 Sep 22 23:21:42.222 DEBG Read :1015 deps:[JobId(1005)] res:true
271482023-09-22T23:21:42.224ZINFOcrucible-pantry (dropshot): request completed latency_us = 3551 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = a1c2f725-bacf-406d-b89f-906c252fa284 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27149 Sep 22 23:21:42.225 DEBG Write :1000 deps:[] res:true
27150 Sep 22 23:21:42.226 DEBG Read :1016 deps:[JobId(1006)] res:true
27151 Sep 22 23:21:42.226 DEBG Read :1016 deps:[JobId(1006)] res:true
27152 Sep 22 23:21:42.226 DEBG Write :1000 deps:[] res:true
27153 Sep 22 23:21:42.226 DEBG Read :1016 deps:[JobId(1006)] res:true
27154 Sep 22 23:21:42.227 DEBG Write :1000 deps:[] res:true
27155 Sep 22 23:21:42.227 DEBG IO Read 1001 has deps [JobId(1000)]
271562023-09-22T23:21:42.228ZINFOcrucible-pantry (dropshot): request completed latency_us = 3481 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 82713f9b-af71-4ed9-989c-1283c6c76fe1 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27157 Sep 22 23:21:42.228 DEBG Read :1001 deps:[JobId(1000)] res:true
27158 Sep 22 23:21:42.229 DEBG Read :1001 deps:[JobId(1000)] res:true
27159 Sep 22 23:21:42.229 DEBG Read :1001 deps:[JobId(1000)] res:true
27160 Sep 22 23:21:42.230 DEBG Read :1017 deps:[JobId(1007)] res:true
27161 Sep 22 23:21:42.230 DEBG Read :1017 deps:[JobId(1007)] res:true
27162 Sep 22 23:21:42.230 DEBG Read :1017 deps:[JobId(1007)] res:true
271632023-09-22T23:21:42.232ZINFOcrucible-pantry (dropshot): request completed latency_us = 3393 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = a1461ff2-a02c-4522-8ffe-252fd394f1c8 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27164 Sep 22 23:21:42.234 DEBG [0] Read AckReady 1001, : downstairs
27165 Sep 22 23:21:42.234 DEBG Read :1018 deps:[JobId(1008)] res:true
27166 Sep 22 23:21:42.234 DEBG Read :1018 deps:[JobId(1008)] res:true
27167 Sep 22 23:21:42.234 DEBG Read :1018 deps:[JobId(1008)] res:true
271682023-09-22T23:21:42.236ZINFOcrucible-pantry (dropshot): request completed latency_us = 3362 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 235f255c-5c74-44be-b926-e163ec05c50c response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
27169 Sep 22 23:21:42.236 DEBG [1] Read already AckReady 1001, : downstairs
27170 Sep 22 23:21:42.238 DEBG Read :1019 deps:[JobId(1009)] res:true
27171 Sep 22 23:21:42.238 DEBG Read :1019 deps:[JobId(1009)] res:true
27172 Sep 22 23:21:42.238 DEBG Read :1019 deps:[JobId(1009)] res:true
27173 Sep 22 23:21:42.239 DEBG [2] Read already AckReady 1001, : downstairs
27174 Sep 22 23:21:42.239 DEBG up_ds_listen was notified
27175 Sep 22 23:21:42.239 DEBG up_ds_listen process 1001
27176 Sep 22 23:21:42.239 DEBG [A] ack job 1001:2, : downstairs
27177 Sep 22 23:21:42.240 DEBG up_ds_listen checked 1 jobs, back to waiting
27178 Sep 22 23:21:42.240 INFO current number of open files limit 65536 is already the maximum
27179 Sep 22 23:21:42.240 INFO Created new region file "/tmp/downstairs-kGoDqY3e/region.json"
271802023-09-22T23:21:42.240ZINFOcrucible-pantry (dropshot): request completed latency_us = 3370 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 4c107f3e-b314-4820-b08c-447a8b7ab499 response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/bulk_read
271812023-09-22T23:21:42.242ZINFOcrucible-pantry (dropshot): request completed latency_us = 321 local_addr = 127.0.0.1:63226 method = POST remote_addr = 127.0.0.1:49934 req_id = 25fd763b-07c8-431b-bbd6-fe4175534a9e response_code = 200 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434/validate
27182 Sep 22 23:21:42.244 INFO current number of open files limit 65536 is already the maximum
27183 Sep 22 23:21:42.244 INFO Opened existing region file "/tmp/downstairs-kGoDqY3e/region.json"
27184 Sep 22 23:21:42.244 INFO Database read version 1
27185 Sep 22 23:21:42.244 INFO Database write version 1
27186 Sep 22 23:21:42.244 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27187 Sep 22 23:21:42.244 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27188 Sep 22 23:21:42.244 INFO UUID: 9425657f-af69-47cb-8591-774480ba4038
27189 Sep 22 23:21:42.244 INFO Blocks per extent:5 Total Extents: 2
27190 Sep 22 23:21:42.245 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27191 Sep 22 23:21:42.245 INFO Crucible Version: Crucible Version: 0.0.1
27192 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27193 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27194 rustc: 1.70.0 stable x86_64-unknown-illumos
27195 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27196 Sep 22 23:21:42.245 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27197 Sep 22 23:21:42.245 INFO Using address: 127.0.0.1:64531, task: main
27198 Sep 22 23:21:42.245 INFO Repair listens on 127.0.0.1:0, task: repair
27199 Sep 22 23:21:42.245 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35080, task: repair
27200 Sep 22 23:21:42.245 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35080, task: repair
27201 Sep 22 23:21:42.245 INFO listening, local_addr: 127.0.0.1:35080, task: repair
27202 Sep 22 23:21:42.245 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35080, task: repair
27203 Sep 22 23:21:42.245 INFO Using repair address: 127.0.0.1:35080, task: main
27204 Sep 22 23:21:42.246 INFO No SSL acceptor configured, task: main
27205 Sep 22 23:21:42.246 INFO A New downstairs: 127.0.0.1:64531
27206 Sep 22 23:21:42.246 INFO Old ops target: [127.0.0.1:54814, 127.0.0.1:38745, 127.0.0.1:41243]
27207 Sep 22 23:21:42.246 INFO New ops target: [127.0.0.1:64531, 127.0.0.1:38745, 127.0.0.1:41243]
27208 Sep 22 23:21:42.246 INFO Replace VCR now: Volume { id: f58b3665-bdc0-4be9-8cf4-eefa2c09c427, block_size: 512, sub_volumes: [Region { block_size: 512, blocks_per_extent: 5, extent_count: 2, opts: CrucibleOpts { id: ad483994-9168-4721-bea0-26e15aed46eb, target: [127.0.0.1:64531, 127.0.0.1:38745, 127.0.0.1:41243], lossy: false, flush_timeout: None, key: Some("dEINxGY0b3GK6upU+NElp3abG+t59SMWQZT7RPtIozo="), cert_pem: None, key_pem: None, root_cert_pem: None, control: None, read_only: false }, gen: 3 }], read_only_parent: None }
27209 Sep 22 23:21:42.246 INFO Volume f58b3665-bdc0-4be9-8cf4-eefa2c09c427, OK to replace: 127.0.0.1:54814 with 127.0.0.1:64531
27210 Sep 22 23:21:42.246 INFO listening on 127.0.0.1:0, task: main
27211 Sep 22 23:21:42.246 WARN f58b3665-bdc0-4be9-8cf4-eefa2c09c427 request to replace downstairs 127.0.0.1:54814 with 127.0.0.1:64531
27212 Sep 22 23:21:42.246 INFO f58b3665-bdc0-4be9-8cf4-eefa2c09c427 found old target: 127.0.0.1:54814 at 0
27213 Sep 22 23:21:42.246 INFO f58b3665-bdc0-4be9-8cf4-eefa2c09c427 replacing old: 127.0.0.1:54814 at 0
27214 Sep 22 23:21:42.246 INFO [0] client skip 2 in process jobs because fault, : downstairs
27215 Sep 22 23:21:42.246 INFO [0] changed 0 jobs to fault skipped, : downstairs
27216 Sep 22 23:21:42.246 INFO [0] ad483994-9168-4721-bea0-26e15aed46eb (d1488151-3064-4eea-a301-ac082b13982b) Active Active Active ds_transition to Replacing
27217 Sep 22 23:21:42.246 INFO [0] Transition from Active to Replacing
27218 Sep 22 23:21:42.246 INFO Replace downstairs underway for f58b3665-bdc0-4be9-8cf4-eefa2c09c427
27219 Sep 22 23:21:42.246 INFO send read now
27220 Sep 22 23:21:42.246 INFO listening on 127.0.0.1:0, task: main
27221 Sep 22 23:21:42.246 INFO listening on 127.0.0.1:0, task: main
27222 Sep 22 23:21:42.246 DEBG IO Read 1002 has deps [JobId(1000)]
27223 Sep 22 23:21:42.247 INFO listening on 127.0.0.1:0, task: main
272242023-09-22T23:21:42.247ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:39338
27225 Sep 22 23:21:42.247 DEBG Read :1002 deps:[JobId(1000)] res:true
272262023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): no entry exists for volume 52ed3973-f354-40ba-a6f9-b5522a98667e, constructing...
27227 Sep 22 23:21:42.248 DEBG Read :1002 deps:[JobId(1000)] res:true
272282023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): Upstairs starts
272292023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
272302023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
272312023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 81fb9c00-5d21-4891-b458-1cfd5eccef89
272322023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): Crucible 81fb9c00-5d21-4891-b458-1cfd5eccef89 has session id: 4ce233b0-59d9-436c-93c7-f67280b0f01b
272332023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:49499 looper = 0
272342023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:56926 looper = 1
272352023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:46734 looper = 2
272362023-09-22T23:21:42.248ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
272372023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
272382023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
272392023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): volume 52ed3973-f354-40ba-a6f9-b5522a98667e constructed ok
27240 The guest has requested activation
272412023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 active request set
27242 Sep 22 23:21:42.249 INFO accepted connection from 127.0.0.1:33803, task: main
27243 Sep 22 23:21:42.249 INFO accepted connection from 127.0.0.1:56045, task: main
27244 Sep 22 23:21:42.249 INFO accepted connection from 127.0.0.1:47897, task: main
272452023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): [0] 81fb9c00-5d21-4891-b458-1cfd5eccef89 looper connected looper = 0
272462023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:49499 in state New
272472023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): [1] 81fb9c00-5d21-4891-b458-1cfd5eccef89 looper connected looper = 1
272482023-09-22T23:21:42.249ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:56926 in state New
272492023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [2] 81fb9c00-5d21-4891-b458-1cfd5eccef89 looper connected looper = 2
272502023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:46734 in state New
27251 Sep 22 23:21:42.250 INFO Connection request from 81fb9c00-5d21-4891-b458-1cfd5eccef89 with version 4, task: proc
27252 Sep 22 23:21:42.250 INFO upstairs UpstairsConnection { upstairs_id: 81fb9c00-5d21-4891-b458-1cfd5eccef89, session_id: 09d4dd63-aea1-4a34-834b-c41ae295a586, gen: 1 } connected, version 4, task: proc
27253 Sep 22 23:21:42.250 INFO Connection request from 81fb9c00-5d21-4891-b458-1cfd5eccef89 with version 4, task: proc
27254 Sep 22 23:21:42.250 INFO upstairs UpstairsConnection { upstairs_id: 81fb9c00-5d21-4891-b458-1cfd5eccef89, session_id: 09d4dd63-aea1-4a34-834b-c41ae295a586, gen: 1 } connected, version 4, task: proc
27255 Sep 22 23:21:42.250 INFO Connection request from 81fb9c00-5d21-4891-b458-1cfd5eccef89 with version 4, task: proc
27256 Sep 22 23:21:42.250 INFO upstairs UpstairsConnection { upstairs_id: 81fb9c00-5d21-4891-b458-1cfd5eccef89, session_id: 09d4dd63-aea1-4a34-834b-c41ae295a586, gen: 1 } connected, version 4, task: proc
272572023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [0] 81fb9c00-5d21-4891-b458-1cfd5eccef89 (09d4dd63-aea1-4a34-834b-c41ae295a586) New New New ds_transition to WaitActive
272582023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
272592023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 09d4dd63-aea1-4a34-834b-c41ae295a586
272602023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [1] 81fb9c00-5d21-4891-b458-1cfd5eccef89 (09d4dd63-aea1-4a34-834b-c41ae295a586) WaitActive New New ds_transition to WaitActive
272612023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
272622023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 09d4dd63-aea1-4a34-834b-c41ae295a586
27263 Sep 22 23:21:42.250 INFO UpstairsConnection { upstairs_id: 81fb9c00-5d21-4891-b458-1cfd5eccef89, session_id: 09d4dd63-aea1-4a34-834b-c41ae295a586, gen: 1 } is now active (read-write)
272642023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [2] 81fb9c00-5d21-4891-b458-1cfd5eccef89 (09d4dd63-aea1-4a34-834b-c41ae295a586) WaitActive WaitActive New ds_transition to WaitActive
272652023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
272662023-09-22T23:21:42.250ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 09d4dd63-aea1-4a34-834b-c41ae295a586
27267 Sep 22 23:21:42.250 INFO UpstairsConnection { upstairs_id: 81fb9c00-5d21-4891-b458-1cfd5eccef89, session_id: 09d4dd63-aea1-4a34-834b-c41ae295a586, gen: 1 } is now active (read-write)
27268 Sep 22 23:21:42.251 INFO UpstairsConnection { upstairs_id: 81fb9c00-5d21-4891-b458-1cfd5eccef89, session_id: 09d4dd63-aea1-4a34-834b-c41ae295a586, gen: 1 } is now active (read-write)
272692023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:49499 has UUID 101c5e08-1317-4b60-a031-62766e2cdb1f
272702023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 101c5e08-1317-4b60-a031-62766e2cdb1f, encrypted: true, database_read_version: 1, database_write_version: 1 }
272712023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 WaitActive WaitActive WaitActive
272722023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:56926 has UUID 44286653-4082-4ed7-a812-3555da359c78
272732023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 44286653-4082-4ed7-a812-3555da359c78, encrypted: true, database_read_version: 1, database_write_version: 1 }
272742023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 WaitActive WaitActive WaitActive
272752023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:46734 has UUID d4d95f2d-aa01-4319-8829-47ddcf12c9ca
272762023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d4d95f2d-aa01-4319-8829-47ddcf12c9ca, encrypted: true, database_read_version: 1, database_write_version: 1 }
27277 Sep 22 23:21:42.251 INFO Current flush_numbers [0..12]: [0, 0]
272782023-09-22T23:21:42.251ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 WaitActive WaitActive WaitActive
27279 Sep 22 23:21:42.252 INFO Downstairs has completed Negotiation, task: proc
27280 Sep 22 23:21:42.252 DEBG [1] Read AckReady 1002, : downstairs
27281 Sep 22 23:21:42.252 INFO Current flush_numbers [0..12]: [0, 0]
27282 Sep 22 23:21:42.252 INFO Downstairs has completed Negotiation, task: proc
27283 Sep 22 23:21:42.252 INFO Current flush_numbers [0..12]: [0, 0]
27284 Sep 22 23:21:42.252 INFO Downstairs has completed Negotiation, task: proc
272852023-09-22T23:21:42.252ZINFOcrucible-pantry (datafile): [0] 81fb9c00-5d21-4891-b458-1cfd5eccef89 (09d4dd63-aea1-4a34-834b-c41ae295a586) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
272862023-09-22T23:21:42.252ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
272872023-09-22T23:21:42.253ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
272882023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
272892023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [1] 81fb9c00-5d21-4891-b458-1cfd5eccef89 (09d4dd63-aea1-4a34-834b-c41ae295a586) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
272902023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
272912023-09-22T23:21:42.253ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
272922023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
272932023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [2] 81fb9c00-5d21-4891-b458-1cfd5eccef89 (09d4dd63-aea1-4a34-834b-c41ae295a586) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
272942023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
27295 The guest has finished waiting for activation
272962023-09-22T23:21:42.253ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
272972023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
272982023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:49499 task reports connection:true
272992023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 WaitQuorum WaitQuorum WaitQuorum
273002023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
273012023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
273022023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
273032023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
273042023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
273052023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
273062023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
273072023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
273082023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
273092023-09-22T23:21:42.253ZINFOcrucible-pantry (datafile): Max found gen is 1
273102023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
273112023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Next flush: 1
273122023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): All extents match
273132023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): No downstairs repair required
273142023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): No initial repair work was required
273152023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
273162023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 is now active with session: 09d4dd63-aea1-4a34-834b-c41ae295a586
273172023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 Set Active after no repair
273182023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
273192023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Set check for repair
273202023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:56926 task reports connection:true
273212023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 Active Active Active
273222023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Set check for repair
273232023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:46734 task reports connection:true
273242023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 Active Active Active
273252023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): Set check for repair
273262023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [0] received reconcile message
273272023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
273282023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
273292023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [1] received reconcile message
273302023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
273312023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
273322023-09-22T23:21:42.254ZINFOcrucible-pantry (datafile): [2] received reconcile message
273332023-09-22T23:21:42.255ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
273342023-09-22T23:21:42.255ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
273352023-09-22T23:21:42.255ZINFOcrucible-pantry (datafile): volume 52ed3973-f354-40ba-a6f9-b5522a98667e activated ok
27336 Sep 22 23:21:42.255 DEBG [2] Read already AckReady 1002, : downstairs
273372023-09-22T23:21:42.255ZINFOcrucible-pantry (datafile): volume 52ed3973-f354-40ba-a6f9-b5522a98667e constructed and inserted ok
27338 Sep 22 23:21:42.255 DEBG up_ds_listen was notified
273392023-09-22T23:21:42.255ZINFOcrucible-pantry (dropshot): request completed latency_us = 6173 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:39338 req_id = c4437a54-300e-4d5d-bfb5-4143cabd857d response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e
27340 Sep 22 23:21:42.255 DEBG up_ds_listen process 1002
27341 Sep 22 23:21:42.255 DEBG [A] ack job 1002:3, : downstairs
27342 Sep 22 23:21:42.255 DEBG up_ds_listen checked 1 jobs, back to waiting
273432023-09-22T23:21:42.257ZINFOcrucible-pantry (dropshot): request completed latency_us = 228 local_addr = 127.0.0.1:63226 method = GET remote_addr = 127.0.0.1:49934 req_id = a4534583-1590-40b0-bb5c-c799be7ac989 response_code = 200 uri = /crucible/pantry/0/job/c5c977cd-0026-4860-a0d3-20d2ec1cd1ae/is_finished
273442023-09-22T23:21:42.258ZINFOcrucible-pantry (dropshot): request completed latency_us = 232 local_addr = 127.0.0.1:63226 method = GET remote_addr = 127.0.0.1:49934 req_id = 978d0e87-f6b1-4e30-a34e-e62b9a78943b response_code = 200 uri = /crucible/pantry/0/job/c5c977cd-0026-4860-a0d3-20d2ec1cd1ae/ok
273452023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:32779
273462023-09-22T23:21:42.259ZINFOcrucible-pantry (datafile): detach removing entry for volume fc92b52b-99b5-4d79-ab62-675ee18fb434
273472023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:49612
273482023-09-22T23:21:42.259ZINFOcrucible-pantry (datafile): detaching volume fc92b52b-99b5-4d79-ab62-675ee18fb434
273492023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:58198
273502023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:62417
273512023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:49581
273522023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:46603
273532023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:47870
273542023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:33608
273552023-09-22T23:21:42.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:59371 remote_addr = 127.0.0.1:53624
27356 test test::test_volume_replace_vcr ... ok
27357 Sep 22 23:21:42.260 INFO current number of open files limit 65536 is already the maximum
27358 Sep 22 23:21:42.260 INFO Created new region file "/tmp/downstairs-suheTfUx/region.json"
27359 Sep 22 23:21:42.261 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
27360 Sep 22 23:21:42.261 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
273612023-09-22T23:21:42.261ZINFOcrucible-pantry (dropshot): request completed latency_us = 2421 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:39338 req_id = fe332a10-b8c8-4539-a056-259538a35a19 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27362 Sep 22 23:21:42.261 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
273632023-09-22T23:21:42.261ZINFOcrucible-pantry (datafile): Request to deactivate this guest
273642023-09-22T23:21:42.261ZINFOcrucible-pantry (datafile): 94d7a72a-673e-42de-882f-9dbeb845d1f4 set deactivating.
273652023-09-22T23:21:42.262ZINFOcrucible-pantry (dropshot): request completed latency_us = 3147 local_addr = 127.0.0.1:63226 method = DELETE remote_addr = 127.0.0.1:49934 req_id = 3ae54a56-4edd-421c-bfd4-826ce7acddfa response_code = 204 uri = /crucible/pantry/0/volume/fc92b52b-99b5-4d79-ab62-675ee18fb434
27366 Sep 22 23:21:42.264 INFO current number of open files limit 65536 is already the maximum
27367 Sep 22 23:21:42.264 INFO Opened existing region file "/tmp/downstairs-suheTfUx/region.json"
27368 Sep 22 23:21:42.264 INFO Database read version 1
27369 Sep 22 23:21:42.264 INFO Database write version 1
27370 Sep 22 23:21:42.264 DEBG Write :1000 deps:[] res:true
27371 Sep 22 23:21:42.265 DEBG Write :1000 deps:[] res:true
27372 Sep 22 23:21:42.265 INFO UUID: 80aebe51-0bf8-4d2d-b94d-436bb289b850
27373 Sep 22 23:21:42.265 INFO Blocks per extent:5 Total Extents: 2
27374 Sep 22 23:21:42.265 INFO Crucible Version: Crucible Version: 0.0.1
27375 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27376 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27377 rustc: 1.70.0 stable x86_64-unknown-illumos
27378 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27379 Sep 22 23:21:42.265 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27380 Sep 22 23:21:42.265 INFO Using address: 127.0.0.1:34016, task: main
27381 Sep 22 23:21:42.265 DEBG Write :1000 deps:[] res:true
27382 Sep 22 23:21:42.265 INFO Repair listens on 127.0.0.1:0, task: repair
27383 Sep 22 23:21:42.265 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49270, task: repair
27384 Sep 22 23:21:42.265 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49270, task: repair
27385 Sep 22 23:21:42.265 INFO listening, local_addr: 127.0.0.1:49270, task: repair
27386 Sep 22 23:21:42.266 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49270, task: repair
27387 Sep 22 23:21:42.266 INFO Using repair address: 127.0.0.1:49270, task: main
27388 Sep 22 23:21:42.266 INFO No SSL acceptor configured, task: main
27389 Sep 22 23:21:42.266 INFO current number of open files limit 65536 is already the maximum
27390 Sep 22 23:21:42.266 INFO Created new region file "/tmp/downstairs-VlIPI8qJ/region.json"
27391 test test::test_pantry_validate ... ok
27392 Sep 22 23:21:42.268 INFO current number of open files limit 65536 is already the maximum
27393 Sep 22 23:21:42.269 INFO Opened existing region file "/tmp/downstairs-VlIPI8qJ/region.json"
27394 Sep 22 23:21:42.269 INFO Database read version 1
27395 Sep 22 23:21:42.269 INFO Database write version 1
27396 Sep 22 23:21:42.269 INFO UUID: 9b908e92-ae00-4395-8352-4c1a031a61cb
27397 Sep 22 23:21:42.269 INFO Blocks per extent:5 Total Extents: 2
27398 Sep 22 23:21:42.269 INFO Crucible Version: Crucible Version: 0.0.1
27399 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27400 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27401 rustc: 1.70.0 stable x86_64-unknown-illumos
27402 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27403 Sep 22 23:21:42.269 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27404 Sep 22 23:21:42.269 INFO Using address: 127.0.0.1:62390, task: main
27405 Sep 22 23:21:42.269 INFO Repair listens on 127.0.0.1:0, task: repair
27406 Sep 22 23:21:42.270 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44742, task: repair
27407 Sep 22 23:21:42.270 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44742, task: repair
27408 Sep 22 23:21:42.270 INFO listening, local_addr: 127.0.0.1:44742, task: repair
27409 Sep 22 23:21:42.270 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44742, task: repair
27410 Sep 22 23:21:42.270 INFO Using repair address: 127.0.0.1:44742, task: main
27411 Sep 22 23:21:42.270 INFO No SSL acceptor configured, task: main
27412 Sep 22 23:21:42.270 INFO current number of open files limit 65536 is already the maximum
27413 Sep 22 23:21:42.270 INFO Created new region file "/tmp/downstairs-lyKODt8n/region.json"
27414 Sep 22 23:21:42.272 INFO current number of open files limit 65536 is already the maximum
27415 Sep 22 23:21:42.272 INFO Opened existing region file "/tmp/downstairs-lyKODt8n/region.json"
27416 Sep 22 23:21:42.272 INFO Database read version 1
27417 Sep 22 23:21:42.272 INFO Database write version 1
27418 Sep 22 23:21:42.273 INFO UUID: 4e224a63-3b6f-46b4-89c9-d64f655331fa
27419 Sep 22 23:21:42.273 INFO Blocks per extent:5 Total Extents: 2
27420 Sep 22 23:21:42.273 INFO Crucible Version: Crucible Version: 0.0.1
27421 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27422 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27423 rustc: 1.70.0 stable x86_64-unknown-illumos
27424 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27425 Sep 22 23:21:42.273 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27426 Sep 22 23:21:42.273 INFO Using address: 127.0.0.1:47605, task: main
274272023-09-22T23:21:42.273ZINFOcrucible-pantry (dropshot): request completed latency_us = 12421 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:32779 req_id = d0c86c51-59b9-4d14-9f60-3e59258f67dc response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27428 Sep 22 23:21:42.273 INFO Repair listens on 127.0.0.1:0, task: repair
274292023-09-22T23:21:42.273ZINFOcrucible-pantry (dropshot): request completed latency_us = 12382 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:49612 req_id = b5d82e11-bf71-4992-8b64-4b8e85ca0847 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27430 Sep 22 23:21:42.273 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55828, task: repair
274312023-09-22T23:21:42.273ZINFOcrucible-pantry (dropshot): request completed latency_us = 12288 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:58198 req_id = 801c2da9-a72b-4238-b2b0-07d5797deb5b response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27432 Sep 22 23:21:42.273 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55828, task: repair
27433 Sep 22 23:21:42.273 INFO listening, local_addr: 127.0.0.1:55828, task: repair
274342023-09-22T23:21:42.273ZINFOcrucible-pantry (dropshot): request completed latency_us = 12186 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = a45c801c-f4ad-4d7c-ab36-5c872260b59d response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
274352023-09-22T23:21:42.274ZINFOcrucible-pantry (dropshot): request completed latency_us = 12099 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:49581 req_id = 6a75311c-647e-406e-a918-46574a94f203 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27436 Sep 22 23:21:42.274 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55828, task: repair
27437 Sep 22 23:21:42.274 INFO Using repair address: 127.0.0.1:55828, task: main
27438 Sep 22 23:21:42.274 INFO No SSL acceptor configured, task: main
274392023-09-22T23:21:42.274ZINFOcrucible-pantry (dropshot): request completed latency_us = 11973 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:46603 req_id = e22158a5-641d-4d28-9873-80d814883240 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
274402023-09-22T23:21:42.274ZINFOcrucible-pantry (dropshot): request completed latency_us = 11875 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:47870 req_id = ca64d123-48cd-4ca1-b6e9-3330fc7ca647 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
274412023-09-22T23:21:42.274ZINFOcrucible-pantry (dropshot): request completed latency_us = 11772 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:33608 req_id = 1e1051d7-e07d-4538-a5ca-60bee59382bb response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27442 Sep 22 23:21:42.274 INFO Upstairs starts
274432023-09-22T23:21:42.274ZINFOcrucible-pantry (dropshot): request completed latency_us = 11713 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:53624 req_id = aa30de07-5767-4abd-9af3-aa6b3ec98516 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_write
27444 Sep 22 23:21:42.274 INFO Crucible Version: BuildInfo {
27445 version: "0.0.1",
27446 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27447 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27448 git_branch: "main",
27449 rustc_semver: "1.70.0",
27450 rustc_channel: "stable",
27451 rustc_host_triple: "x86_64-unknown-illumos",
27452 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27453 cargo_triple: "x86_64-unknown-illumos",
27454 debug: true,
27455 opt_level: 0,
27456 }
27457 Sep 22 23:21:42.274 INFO Upstairs <-> Downstairs Message Version: 4
27458 Sep 22 23:21:42.274 INFO Crucible stats registered with UUID: 799d475b-e46c-45c2-863b-4988622fc243
27459 Sep 22 23:21:42.274 INFO Crucible 799d475b-e46c-45c2-863b-4988622fc243 has session id: 59749590-6fe4-4251-b14d-99efeb11148c
27460 Sep 22 23:21:42.274 INFO listening on 127.0.0.1:0, task: main
27461 Sep 22 23:21:42.274 INFO listening on 127.0.0.1:0, task: main
27462 Sep 22 23:21:42.274 INFO listening on 127.0.0.1:0, task: main
27463 Sep 22 23:21:42.274 INFO [0] connecting to 127.0.0.1:34016, looper: 0
27464 Sep 22 23:21:42.274 DEBG Write :1001 deps:[] res:true
27465 Sep 22 23:21:42.274 INFO [1] connecting to 127.0.0.1:62390, looper: 1
27466 Sep 22 23:21:42.274 DEBG Write :1002 deps:[] res:true
27467 Sep 22 23:21:42.274 INFO [2] connecting to 127.0.0.1:47605, looper: 2
27468 Sep 22 23:21:42.275 INFO up_listen starts, task: up_listen
27469 Sep 22 23:21:42.275 INFO Wait for all three downstairs to come online
27470 Sep 22 23:21:42.275 INFO Flush timeout: 0.5
27471 Sep 22 23:21:42.275 INFO [0] 799d475b-e46c-45c2-863b-4988622fc243 looper connected, looper: 0
27472 Sep 22 23:21:42.275 DEBG Write :1003 deps:[] res:true
27473 Sep 22 23:21:42.275 INFO [0] Proc runs for 127.0.0.1:34016 in state New
27474 Sep 22 23:21:42.275 INFO [1] 799d475b-e46c-45c2-863b-4988622fc243 looper connected, looper: 1
27475 Sep 22 23:21:42.275 INFO [1] Proc runs for 127.0.0.1:62390 in state New
27476 Sep 22 23:21:42.275 INFO accepted connection from 127.0.0.1:36492, task: main
27477 Sep 22 23:21:42.275 DEBG Write :1004 deps:[] res:true
27478 Sep 22 23:21:42.275 INFO accepted connection from 127.0.0.1:38651, task: main
27479 Sep 22 23:21:42.275 INFO [2] 799d475b-e46c-45c2-863b-4988622fc243 looper connected, looper: 2
27480 Sep 22 23:21:42.275 INFO [2] Proc runs for 127.0.0.1:47605 in state New
27481 Sep 22 23:21:42.275 INFO accepted connection from 127.0.0.1:53191, task: main
27482 Sep 22 23:21:42.275 DEBG Write :1005 deps:[] res:true
27483 Sep 22 23:21:42.275 INFO Connection request from 799d475b-e46c-45c2-863b-4988622fc243 with version 4, task: proc
27484 Sep 22 23:21:42.275 INFO upstairs UpstairsConnection { upstairs_id: 799d475b-e46c-45c2-863b-4988622fc243, session_id: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437, gen: 1 } connected, version 4, task: proc
27485 Sep 22 23:21:42.275 INFO Connection request from 799d475b-e46c-45c2-863b-4988622fc243 with version 4, task: proc
27486 Sep 22 23:21:42.275 INFO upstairs UpstairsConnection { upstairs_id: 799d475b-e46c-45c2-863b-4988622fc243, session_id: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437, gen: 1 } connected, version 4, task: proc
27487 Sep 22 23:21:42.275 DEBG Write :1006 deps:[] res:true
27488 Sep 22 23:21:42.276 INFO Connection request from 799d475b-e46c-45c2-863b-4988622fc243 with version 4, task: proc
27489 Sep 22 23:21:42.276 INFO upstairs UpstairsConnection { upstairs_id: 799d475b-e46c-45c2-863b-4988622fc243, session_id: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437, gen: 1 } connected, version 4, task: proc
27490 Sep 22 23:21:42.276 DEBG Write :1007 deps:[] res:true
27491 Sep 22 23:21:42.276 INFO [0] 799d475b-e46c-45c2-863b-4988622fc243 (6cb054b3-bde9-4bc7-a1bb-ff874c7b4437) New New New ds_transition to WaitActive
27492 Sep 22 23:21:42.276 INFO [0] Transition from New to WaitActive
27493 Sep 22 23:21:42.276 INFO [1] 799d475b-e46c-45c2-863b-4988622fc243 (6cb054b3-bde9-4bc7-a1bb-ff874c7b4437) WaitActive New New ds_transition to WaitActive
27494 Sep 22 23:21:42.276 INFO [1] Transition from New to WaitActive
27495 Sep 22 23:21:42.276 DEBG Write :1008 deps:[] res:true
27496 Sep 22 23:21:42.276 INFO [2] 799d475b-e46c-45c2-863b-4988622fc243 (6cb054b3-bde9-4bc7-a1bb-ff874c7b4437) WaitActive WaitActive New ds_transition to WaitActive
27497 Sep 22 23:21:42.276 INFO [2] Transition from New to WaitActive
27498 The guest has requested activation
27499 Sep 22 23:21:42.276 INFO 799d475b-e46c-45c2-863b-4988622fc243 active request set
27500 Sep 22 23:21:42.276 DEBG Write :1009 deps:[] res:true
27501 Sep 22 23:21:42.276 INFO [0] received activate with gen 1
27502 Sep 22 23:21:42.276 INFO [0] client got ds_active_rx, promote! session 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437
27503 Sep 22 23:21:42.276 INFO [1] received activate with gen 1
27504 Sep 22 23:21:42.276 INFO [1] client got ds_active_rx, promote! session 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437
27505 Sep 22 23:21:42.276 INFO [2] received activate with gen 1
27506 Sep 22 23:21:42.276 INFO [2] client got ds_active_rx, promote! session 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437
27507 Sep 22 23:21:42.276 INFO UpstairsConnection { upstairs_id: 799d475b-e46c-45c2-863b-4988622fc243, session_id: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437, gen: 1 } is now active (read-write)
27508 Sep 22 23:21:42.276 INFO UpstairsConnection { upstairs_id: 799d475b-e46c-45c2-863b-4988622fc243, session_id: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437, gen: 1 } is now active (read-write)
27509 Sep 22 23:21:42.277 INFO UpstairsConnection { upstairs_id: 799d475b-e46c-45c2-863b-4988622fc243, session_id: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437, gen: 1 } is now active (read-write)
27510 Sep 22 23:21:42.277 INFO [0] downstairs client at 127.0.0.1:34016 has UUID 80aebe51-0bf8-4d2d-b94d-436bb289b850
27511 Sep 22 23:21:42.277 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 80aebe51-0bf8-4d2d-b94d-436bb289b850, encrypted: true, database_read_version: 1, database_write_version: 1 }
27512 Sep 22 23:21:42.277 INFO 799d475b-e46c-45c2-863b-4988622fc243 WaitActive WaitActive WaitActive
27513 Sep 22 23:21:42.277 INFO [1] downstairs client at 127.0.0.1:62390 has UUID 9b908e92-ae00-4395-8352-4c1a031a61cb
27514 Sep 22 23:21:42.277 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9b908e92-ae00-4395-8352-4c1a031a61cb, encrypted: true, database_read_version: 1, database_write_version: 1 }
27515 Sep 22 23:21:42.277 INFO 799d475b-e46c-45c2-863b-4988622fc243 WaitActive WaitActive WaitActive
27516 Sep 22 23:21:42.277 INFO [2] downstairs client at 127.0.0.1:47605 has UUID 4e224a63-3b6f-46b4-89c9-d64f655331fa
27517 Sep 22 23:21:42.277 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4e224a63-3b6f-46b4-89c9-d64f655331fa, encrypted: true, database_read_version: 1, database_write_version: 1 }
27518 Sep 22 23:21:42.277 INFO 799d475b-e46c-45c2-863b-4988622fc243 WaitActive WaitActive WaitActive
27519 Sep 22 23:21:42.277 INFO Current flush_numbers [0..12]: [0, 0]
27520 Sep 22 23:21:42.277 INFO Downstairs has completed Negotiation, task: proc
27521 Sep 22 23:21:42.278 INFO Current flush_numbers [0..12]: [0, 0]
27522 Sep 22 23:21:42.278 INFO Downstairs has completed Negotiation, task: proc
27523 Sep 22 23:21:42.278 INFO Current flush_numbers [0..12]: [0, 0]
27524 Sep 22 23:21:42.278 INFO Downstairs has completed Negotiation, task: proc
27525 Sep 22 23:21:42.278 INFO [0] 799d475b-e46c-45c2-863b-4988622fc243 (6cb054b3-bde9-4bc7-a1bb-ff874c7b4437) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
27526 Sep 22 23:21:42.278 INFO [0] Transition from WaitActive to WaitQuorum
27527 Sep 22 23:21:42.278 WARN [0] new RM replaced this: None
27528 Sep 22 23:21:42.278 INFO [0] Starts reconcile loop
27529 Sep 22 23:21:42.278 INFO [1] 799d475b-e46c-45c2-863b-4988622fc243 (6cb054b3-bde9-4bc7-a1bb-ff874c7b4437) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
27530 Sep 22 23:21:42.278 INFO [1] Transition from WaitActive to WaitQuorum
27531 Sep 22 23:21:42.278 WARN [1] new RM replaced this: None
27532 Sep 22 23:21:42.278 INFO [1] Starts reconcile loop
27533 Sep 22 23:21:42.278 INFO [2] 799d475b-e46c-45c2-863b-4988622fc243 (6cb054b3-bde9-4bc7-a1bb-ff874c7b4437) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
27534 Sep 22 23:21:42.278 INFO [2] Transition from WaitActive to WaitQuorum
27535 Sep 22 23:21:42.278 WARN [2] new RM replaced this: None
27536 Sep 22 23:21:42.278 INFO [2] Starts reconcile loop
27537 Sep 22 23:21:42.278 INFO [0] 127.0.0.1:34016 task reports connection:true
27538 Sep 22 23:21:42.278 INFO 799d475b-e46c-45c2-863b-4988622fc243 WaitQuorum WaitQuorum WaitQuorum
27539 Sep 22 23:21:42.278 INFO [0]R flush_numbers: [0, 0]
27540 Sep 22 23:21:42.278 INFO [0]R generation: [0, 0]
27541 Sep 22 23:21:42.278 INFO [0]R dirty: [false, false]
27542 Sep 22 23:21:42.279 INFO [1]R flush_numbers: [0, 0]
27543 Sep 22 23:21:42.279 INFO [1]R generation: [0, 0]
27544 Sep 22 23:21:42.279 INFO [1]R dirty: [false, false]
27545 Sep 22 23:21:42.279 INFO [2]R flush_numbers: [0, 0]
27546 Sep 22 23:21:42.279 INFO [2]R generation: [0, 0]
27547 Sep 22 23:21:42.279 INFO [2]R dirty: [false, false]
27548 Sep 22 23:21:42.279 INFO Max found gen is 1
27549 Sep 22 23:21:42.279 INFO Generation requested: 1 >= found:1
27550 Sep 22 23:21:42.279 INFO Next flush: 1
27551 Sep 22 23:21:42.279 INFO All extents match
27552 Sep 22 23:21:42.279 INFO No downstairs repair required
27553 Sep 22 23:21:42.279 INFO No initial repair work was required
27554 Sep 22 23:21:42.279 INFO Set Downstairs and Upstairs active
27555 Sep 22 23:21:42.279 INFO 799d475b-e46c-45c2-863b-4988622fc243 is now active with session: 6cb054b3-bde9-4bc7-a1bb-ff874c7b4437
27556 Sep 22 23:21:42.279 INFO 799d475b-e46c-45c2-863b-4988622fc243 Set Active after no repair
27557 Sep 22 23:21:42.279 INFO Notify all downstairs, region set compare is done.
27558 Sep 22 23:21:42.279 INFO Set check for repair
27559 Sep 22 23:21:42.279 INFO [1] 127.0.0.1:62390 task reports connection:true
27560 Sep 22 23:21:42.279 INFO 799d475b-e46c-45c2-863b-4988622fc243 Active Active Active
27561 Sep 22 23:21:42.279 INFO Set check for repair
27562 Sep 22 23:21:42.279 INFO [2] 127.0.0.1:47605 task reports connection:true
27563 Sep 22 23:21:42.279 INFO 799d475b-e46c-45c2-863b-4988622fc243 Active Active Active
27564 Sep 22 23:21:42.279 INFO Set check for repair
27565 Sep 22 23:21:42.279 INFO [0] received reconcile message
27566 Sep 22 23:21:42.279 INFO [0] All repairs completed, exit
27567 Sep 22 23:21:42.279 INFO [0] Starts cmd_loop
27568 Sep 22 23:21:42.279 INFO [1] received reconcile message
27569 Sep 22 23:21:42.279 INFO [1] All repairs completed, exit
27570 Sep 22 23:21:42.279 INFO [1] Starts cmd_loop
27571 Sep 22 23:21:42.279 INFO [2] received reconcile message
27572 Sep 22 23:21:42.279 INFO [2] All repairs completed, exit
27573 Sep 22 23:21:42.279 INFO [2] Starts cmd_loop
27574 The guest has finished waiting for activation
27575 Sep 22 23:21:42.279 DEBG Write :1001 deps:[] res:true
27576 Sep 22 23:21:42.280 DEBG Write :1002 deps:[] res:true
27577 Sep 22 23:21:42.280 DEBG Write :1003 deps:[] res:true
27578 Sep 22 23:21:42.280 DEBG Write :1004 deps:[] res:true
27579 Sep 22 23:21:42.281 DEBG Write :1005 deps:[] res:true
27580 Sep 22 23:21:42.281 DEBG Write :1006 deps:[] res:true
27581 Sep 22 23:21:42.281 DEBG Write :1007 deps:[] res:true
27582 test test::volume_zero_length_io ... ok
27583 Sep 22 23:21:42.282 DEBG Write :1008 deps:[] res:true
27584 Sep 22 23:21:42.282 DEBG Write :1009 deps:[] res:true
27585 Sep 22 23:21:42.282 DEBG Write :1001 deps:[] res:true
27586 Sep 22 23:21:42.282 DEBG Write :1002 deps:[] res:true
27587 Sep 22 23:21:42.282 DEBG Write :1003 deps:[] res:true
27588 Sep 22 23:21:42.283 DEBG Write :1004 deps:[] res:true
27589 Sep 22 23:21:42.283 DEBG Write :1005 deps:[] res:true
27590 Sep 22 23:21:42.283 DEBG Write :1006 deps:[] res:true
27591 Sep 22 23:21:42.283 DEBG Write :1007 deps:[] res:true
27592 Sep 22 23:21:42.284 DEBG Write :1008 deps:[] res:true
27593 Sep 22 23:21:42.284 DEBG Write :1009 deps:[] res:true
27594 Sep 22 23:21:42.285 DEBG Read :1010 deps:[JobId(1000)] res:true
27595 Sep 22 23:21:42.286 DEBG Read :1010 deps:[JobId(1000)] res:true
27596 Sep 22 23:21:42.286 DEBG Read :1010 deps:[JobId(1000)] res:true
275972023-09-22T23:21:42.287ZINFOcrucible-pantry (dropshot): request completed latency_us = 2597 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = 53c06d21-f6d1-4def-a90f-5a404512ac37 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27598 Sep 22 23:21:42.289 DEBG Read :1011 deps:[JobId(1001)] res:true
27599 Sep 22 23:21:42.289 DEBG Read :1011 deps:[JobId(1001)] res:true
27600 Sep 22 23:21:42.289 DEBG Read :1011 deps:[JobId(1001)] res:true
276012023-09-22T23:21:42.290ZINFOcrucible-pantry (dropshot): request completed latency_us = 2421 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = 10700452-e31f-4920-960b-a702c1710188 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27602 Sep 22 23:21:42.292 DEBG Read :1012 deps:[JobId(1002)] res:true
27603 Sep 22 23:21:42.292 DEBG Read :1012 deps:[JobId(1002)] res:true
27604 Sep 22 23:21:42.292 DEBG Read :1012 deps:[JobId(1002)] res:true
276052023-09-22T23:21:42.293ZINFOcrucible-pantry (dropshot): request completed latency_us = 2413 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = 741197ec-27d2-4fa8-9e82-4b6e49eb346a response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27606 Sep 22 23:21:42.294 DEBG Read :1013 deps:[JobId(1003)] res:true
27607 Sep 22 23:21:42.295 DEBG Read :1013 deps:[JobId(1003)] res:true
27608 Sep 22 23:21:42.295 DEBG Read :1013 deps:[JobId(1003)] res:true
276092023-09-22T23:21:42.296ZINFOcrucible-pantry (dropshot): request completed latency_us = 2400 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = 0e0b4902-696e-4fd5-a4de-3dc810389cb2 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27610 Sep 22 23:21:42.297 DEBG Read :1014 deps:[JobId(1004)] res:true
27611 Sep 22 23:21:42.298 DEBG Read :1014 deps:[JobId(1004)] res:true
27612 Sep 22 23:21:42.298 DEBG Read :1014 deps:[JobId(1004)] res:true
276132023-09-22T23:21:42.299ZINFOcrucible-pantry (dropshot): request completed latency_us = 2423 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = e7690cac-8556-46a5-88df-bb2c2c4e6a7b response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27614 Sep 22 23:21:42.300 DEBG Read :1015 deps:[JobId(1005)] res:true
27615 Sep 22 23:21:42.301 DEBG Read :1015 deps:[JobId(1005)] res:true
27616 Sep 22 23:21:42.301 DEBG Read :1015 deps:[JobId(1005)] res:true
276172023-09-22T23:21:42.302ZINFOcrucible-pantry (dropshot): request completed latency_us = 2501 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = ec931f11-1ded-46d0-aac4-93c2e7238824 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27618 Sep 22 23:21:42.303 DEBG Read :1016 deps:[JobId(1006)] res:true
27619 Sep 22 23:21:42.304 DEBG Read :1016 deps:[JobId(1006)] res:true
27620 Sep 22 23:21:42.304 DEBG Read :1016 deps:[JobId(1006)] res:true
276212023-09-22T23:21:42.305ZINFOcrucible-pantry (dropshot): request completed latency_us = 2767 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = 59404c39-c66b-4aca-ac2f-8cc024f06818 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27622 Sep 22 23:21:42.307 DEBG Read :1017 deps:[JobId(1007)] res:true
27623 Sep 22 23:21:42.307 DEBG Read :1017 deps:[JobId(1007)] res:true
27624 Sep 22 23:21:42.308 DEBG Read :1017 deps:[JobId(1007)] res:true
276252023-09-22T23:21:42.310ZINFOcrucible-pantry (dropshot): request completed latency_us = 3491 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = 235a2621-7dfa-45e0-90d0-784023f1bc3d response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27626 Sep 22 23:21:42.311 DEBG Read :1018 deps:[JobId(1008)] res:true
27627 Sep 22 23:21:42.312 DEBG Read :1018 deps:[JobId(1008)] res:true
27628 Sep 22 23:21:42.312 DEBG Read :1018 deps:[JobId(1008)] res:true
276292023-09-22T23:21:42.314ZINFOcrucible-pantry (dropshot): request completed latency_us = 3402 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = ea1f7246-1193-47ff-a583-f7c243e0d77c response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
27630 Sep 22 23:21:42.315 DEBG Read :1019 deps:[JobId(1009)] res:true
27631 Sep 22 23:21:42.316 DEBG Read :1019 deps:[JobId(1009)] res:true
27632 Sep 22 23:21:42.316 DEBG Read :1019 deps:[JobId(1009)] res:true
276332023-09-22T23:21:42.318ZINFOcrucible-pantry (dropshot): request completed latency_us = 3393 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = ebf2169c-368b-4f26-8229-7f436cd283d7 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/bulk_read
276342023-09-22T23:21:42.319ZINFOcrucible-pantry (dropshot): request completed latency_us = 303 local_addr = 127.0.0.1:59371 method = POST remote_addr = 127.0.0.1:62417 req_id = b0574342-119a-4c07-b4de-7f6e5e133ac4 response_code = 200 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e/validate
27635 Sep 22 23:21:42.320 DEBG Read :1020 deps:[JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27636 Sep 22 23:21:42.321 DEBG Read :1020 deps:[JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27637 Sep 22 23:21:42.321 DEBG Read :1020 deps:[JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
276382023-09-22T23:21:42.328ZINFOcrucible-pantry (dropshot): request completed latency_us = 205 local_addr = 127.0.0.1:59371 method = GET remote_addr = 127.0.0.1:62417 req_id = c0737c4e-3149-40c4-9c24-cbb8ecc6a7ca response_code = 200 uri = /crucible/pantry/0/job/f80a3498-aae7-4511-9946-9eb82ea06085/is_finished
276392023-09-22T23:21:42.329ZINFOcrucible-pantry (dropshot): request completed latency_us = 216 local_addr = 127.0.0.1:59371 method = GET remote_addr = 127.0.0.1:62417 req_id = a673f4df-195f-4309-b0f7-b9a088ff721a response_code = 200 uri = /crucible/pantry/0/job/f80a3498-aae7-4511-9946-9eb82ea06085/ok
276402023-09-22T23:21:42.330ZINFOcrucible-pantry (datafile): detach removing entry for volume 52ed3973-f354-40ba-a6f9-b5522a98667e
276412023-09-22T23:21:42.330ZINFOcrucible-pantry (datafile): detaching volume 52ed3973-f354-40ba-a6f9-b5522a98667e
27642 Sep 22 23:21:42.332 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
27643 Sep 22 23:21:42.332 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
27644 Sep 22 23:21:42.332 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
276452023-09-22T23:21:42.333ZINFOcrucible-pantry (datafile): Request to deactivate this guest
276462023-09-22T23:21:42.333ZINFOcrucible-pantry (datafile): 81fb9c00-5d21-4891-b458-1cfd5eccef89 set deactivating.
276472023-09-22T23:21:42.333ZINFOcrucible-pantry (dropshot): request completed latency_us = 3626 local_addr = 127.0.0.1:59371 method = DELETE remote_addr = 127.0.0.1:62417 req_id = 755994e8-542f-40d5-871c-59cac93f93a8 response_code = 204 uri = /crucible/pantry/0/volume/52ed3973-f354-40ba-a6f9-b5522a98667e
27648 test test::test_pantry_validate_subset ... ok
27649 Sep 22 23:21:42.509 DEBG Write :1002 deps:[] res:true
27650 Sep 22 23:21:42.524 DEBG Write :1002 deps:[] res:true
27651 Sep 22 23:21:42.539 DEBG Write :1002 deps:[] res:true
27652 Sep 22 23:21:42.545 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27653 Sep 22 23:21:42.545 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27654 Sep 22 23:21:42.545 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27655 Sep 22 23:21:42.635 DEBG IO Write 1002 has deps [JobId(1001)]
27656 Sep 22 23:21:42.635 DEBG up_ds_listen was notified
27657 Sep 22 23:21:42.635 DEBG up_ds_listen process 1002
27658 Sep 22 23:21:42.635 DEBG [A] ack job 1002:3, : downstairs
27659 Sep 22 23:21:42.635 DEBG up_ds_listen checked 1 jobs, back to waiting
276602023-09-22T23:21:42.705ZINFOcrucible-pantry (dropshot): request completed latency_us = 258 local_addr = 127.0.0.1:47396 method = GET remote_addr = 127.0.0.1:39199 req_id = 0644f126-3797-4ef4-a330-ff93a2831140 response_code = 200 uri = /crucible/pantry/0/job/e14704dd-f376-471e-a250-3ebae85bb495/is_finished
276612023-09-22T23:21:42.786ZINFOcrucible-pantry (dropshot): request completed latency_us = 295 local_addr = 127.0.0.1:52393 method = GET remote_addr = 127.0.0.1:57584 req_id = 0091e2b8-1f8c-4b7f-8c2d-78be4d76d22b response_code = 200 uri = /crucible/pantry/0/job/7165c87e-0953-466c-822f-e26bef4d9d7d/is_finished
27662 Sep 22 23:21:42.882 DEBG Write :1000 deps:[] res:true
27663 Sep 22 23:21:42.898 DEBG Write :1000 deps:[] res:true
27664 Sep 22 23:21:42.914 DEBG Write :1000 deps:[] res:true
27665 Sep 22 23:21:42.920 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27666 Sep 22 23:21:42.920 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27667 Sep 22 23:21:42.920 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27668 Sep 22 23:21:42.976 DEBG IO Write 1003 has deps [JobId(1001)]
27669 Sep 22 23:21:42.976 DEBG up_ds_listen was notified
27670 Sep 22 23:21:42.976 DEBG up_ds_listen process 1003
27671 Sep 22 23:21:42.976 DEBG [A] ack job 1003:4, : downstairs
27672 Sep 22 23:21:42.976 DEBG up_ds_listen checked 1 jobs, back to waiting
27673 Sep 22 23:21:42.977 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001)]
27674 Sep 22 23:21:43.076 DEBG Write :1004 deps:[] res:true
27675 Sep 22 23:21:43.092 DEBG Write :1004 deps:[] res:true
27676 Sep 22 23:21:43.108 DEBG Write :1004 deps:[] res:true
27677 Sep 22 23:21:43.114 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27678 Sep 22 23:21:43.114 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27679 Sep 22 23:21:43.114 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27680 Sep 22 23:21:43.309 DEBG IO Write 1005 has deps [JobId(1004), JobId(1001)]
27681 Sep 22 23:21:43.309 DEBG up_ds_listen was notified
27682 Sep 22 23:21:43.309 DEBG up_ds_listen process 1005
27683 Sep 22 23:21:43.309 DEBG [A] ack job 1005:6, : downstairs
27684 Sep 22 23:21:43.309 DEBG up_ds_listen checked 1 jobs, back to waiting
27685 Sep 22 23:21:43.503 DEBG Write :1006 deps:[] res:true
27686 Sep 22 23:21:43.515 DEBG Write :1006 deps:[] res:true
27687 Sep 22 23:21:43.527 DEBG Write :1006 deps:[] res:true
27688 Sep 22 23:21:43.531 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27689 Sep 22 23:21:43.531 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27690 Sep 22 23:21:43.531 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27691 Sep 22 23:21:43.645 DEBG IO Write 1006 has deps [JobId(1004), JobId(1001)]
27692 Sep 22 23:21:43.645 DEBG up_ds_listen was notified
27693 Sep 22 23:21:43.645 DEBG up_ds_listen process 1006
27694 Sep 22 23:21:43.645 DEBG [A] ack job 1006:7, : downstairs
27695 Sep 22 23:21:43.645 DEBG up_ds_listen checked 1 jobs, back to waiting
27696 Sep 22 23:21:43.646 DEBG IO Flush 1007 has deps [JobId(1006), JobId(1005), JobId(1004)]
276972023-09-22T23:21:43.708ZINFOcrucible-pantry (dropshot): request completed latency_us = 320 local_addr = 127.0.0.1:47396 method = GET remote_addr = 127.0.0.1:39199 req_id = 8a20dd5c-415f-4a0e-b6b1-63bf6f59b6d0 response_code = 200 uri = /crucible/pantry/0/job/e14704dd-f376-471e-a250-3ebae85bb495/is_finished
276982023-09-22T23:21:43.709ZINFOcrucible-pantry (dropshot): request completed latency_us = 228 local_addr = 127.0.0.1:47396 method = GET remote_addr = 127.0.0.1:39199 req_id = 889ac11e-411e-4ef3-b3c7-da2104b0eea7 response_code = 200 uri = /crucible/pantry/0/job/e14704dd-f376-471e-a250-3ebae85bb495/ok
276992023-09-22T23:21:43.710ZINFOcrucible-pantry (datafile): detach removing entry for volume 1eb9cf2b-fac5-48fd-86a9-e21629057f3e
277002023-09-22T23:21:43.710ZINFOcrucible-pantry (datafile): detaching volume 1eb9cf2b-fac5-48fd-86a9-e21629057f3e
27701 Sep 22 23:21:43.710 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27702 Sep 22 23:21:43.710 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27703 Sep 22 23:21:43.711 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
277042023-09-22T23:21:43.711ZINFOcrucible-pantry (datafile): Request to deactivate this guest
277052023-09-22T23:21:43.711ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 set deactivating.
277062023-09-22T23:21:43.711ZINFOcrucible-pantry (dropshot): request completed latency_us = 2072 local_addr = 127.0.0.1:47396 method = DELETE remote_addr = 127.0.0.1:39199 req_id = 507c63fd-3dfc-47e3-8e95-048158e5a7ac response_code = 204 uri = /crucible/pantry/0/volume/1eb9cf2b-fac5-48fd-86a9-e21629057f3e
27707 Sep 22 23:21:43.807 DEBG Write :1002 deps:[] res:true
27708 Sep 22 23:21:43.822 DEBG Write :1002 deps:[] res:true
27709 Sep 22 23:21:43.839 DEBG Write :1002 deps:[] res:true
277102023-09-22T23:21:43.841ZINFOcrucible-pantry (dropshot): request completed latency_us = 408 local_addr = 127.0.0.1:52393 method = GET remote_addr = 127.0.0.1:57584 req_id = 8b074832-3581-4beb-826d-716f6137fc76 response_code = 200 uri = /crucible/pantry/0/job/7165c87e-0953-466c-822f-e26bef4d9d7d/is_finished
27711 Sep 22 23:21:43.846 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27712 Sep 22 23:21:43.846 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27713 Sep 22 23:21:43.846 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27714 Sep 22 23:21:43.986 DEBG IO Write 1008 has deps [JobId(1007), JobId(1004), JobId(1001)]
27715 Sep 22 23:21:43.986 DEBG up_ds_listen was notified
27716 Sep 22 23:21:43.986 DEBG up_ds_listen process 1008
27717 Sep 22 23:21:43.986 DEBG [A] ack job 1008:9, : downstairs
27718 Sep 22 23:21:43.986 DEBG up_ds_listen checked 1 jobs, back to waiting
27719 Sep 22 23:21:44.064 INFO current number of open files limit 65536 is already the maximum
27720 Sep 22 23:21:44.064 INFO Created new region file "/tmp/downstairs-RjobStuf/region.json"
27721 Sep 22 23:21:44.164 INFO current number of open files limit 65536 is already the maximum
27722 Sep 22 23:21:44.164 INFO Opened existing region file "/tmp/downstairs-RjobStuf/region.json"
27723 Sep 22 23:21:44.164 INFO Database read version 1
27724 Sep 22 23:21:44.164 INFO Database write version 1
27725 Sep 22 23:21:44.217 INFO UUID: 0c4b6300-aa56-4191-90c9-1d56da8be939
27726 Sep 22 23:21:44.217 INFO Blocks per extent:512 Total Extents: 188
27727 Sep 22 23:21:44.217 INFO Crucible Version: Crucible Version: 0.0.1
27728 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27729 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27730 rustc: 1.70.0 stable x86_64-unknown-illumos
27731 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27732 Sep 22 23:21:44.217 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27733 Sep 22 23:21:44.217 INFO Using address: 127.0.0.1:63038, task: main
27734 Sep 22 23:21:44.218 INFO Repair listens on 127.0.0.1:0, task: repair
27735 Sep 22 23:21:44.218 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53331, task: repair
27736 Sep 22 23:21:44.218 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53331, task: repair
27737 Sep 22 23:21:44.219 INFO listening, local_addr: 127.0.0.1:53331, task: repair
27738 Sep 22 23:21:44.219 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53331, task: repair
27739 Sep 22 23:21:44.219 INFO Using repair address: 127.0.0.1:53331, task: main
27740 Sep 22 23:21:44.219 INFO No SSL acceptor configured, task: main
27741 Sep 22 23:21:44.227 INFO current number of open files limit 65536 is already the maximum
27742 Sep 22 23:21:44.227 INFO Created new region file "/tmp/downstairs-CwBIEkKq/region.json"
27743 Sep 22 23:21:44.322 DEBG IO Write 1009 has deps [JobId(1007), JobId(1004), JobId(1001)]
27744 Sep 22 23:21:44.322 DEBG up_ds_listen was notified
27745 Sep 22 23:21:44.322 DEBG up_ds_listen process 1009
27746 Sep 22 23:21:44.322 DEBG [A] ack job 1009:10, : downstairs
27747 Sep 22 23:21:44.322 DEBG up_ds_listen checked 1 jobs, back to waiting
27748 Sep 22 23:21:44.323 DEBG IO Flush 1010 has deps [JobId(1009), JobId(1008), JobId(1007)]
27749 Sep 22 23:21:44.330 INFO listening on 127.0.0.1:0, task: main
27750 Sep 22 23:21:44.331 INFO current number of open files limit 65536 is already the maximum
27751 Sep 22 23:21:44.331 INFO Opened existing region file "/tmp/downstairs-CwBIEkKq/region.json"
27752 Sep 22 23:21:44.331 INFO Database read version 1
27753 Sep 22 23:21:44.331 INFO Database write version 1
27754 Sep 22 23:21:44.381 INFO UUID: 77625cc5-cf57-4c89-b7d2-c29dca5216c1
27755 Sep 22 23:21:44.381 INFO Blocks per extent:512 Total Extents: 188
27756 Sep 22 23:21:44.381 INFO Crucible Version: Crucible Version: 0.0.1
27757 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27758 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27759 rustc: 1.70.0 stable x86_64-unknown-illumos
27760 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27761 Sep 22 23:21:44.381 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27762 Sep 22 23:21:44.381 INFO Using address: 127.0.0.1:50898, task: main
27763 Sep 22 23:21:44.381 INFO Repair listens on 127.0.0.1:0, task: repair
27764 Sep 22 23:21:44.382 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43544, task: repair
27765 Sep 22 23:21:44.382 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43544, task: repair
27766 Sep 22 23:21:44.382 INFO listening, local_addr: 127.0.0.1:43544, task: repair
27767 Sep 22 23:21:44.382 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43544, task: repair
27768 Sep 22 23:21:44.382 INFO Using repair address: 127.0.0.1:43544, task: main
27769 Sep 22 23:21:44.382 INFO No SSL acceptor configured, task: main
27770 Sep 22 23:21:44.390 INFO current number of open files limit 65536 is already the maximum
27771 Sep 22 23:21:44.390 INFO Created new region file "/tmp/downstairs-A2wB4MTZ/region.json"
27772 Sep 22 23:21:44.494 INFO listening on 127.0.0.1:0, task: main
27773 Sep 22 23:21:44.495 INFO current number of open files limit 65536 is already the maximum
27774 Sep 22 23:21:44.495 INFO Opened existing region file "/tmp/downstairs-A2wB4MTZ/region.json"
27775 Sep 22 23:21:44.495 INFO Database read version 1
27776 Sep 22 23:21:44.495 INFO Database write version 1
27777 Sep 22 23:21:44.499 DEBG Write :1004 deps:[] res:true
27778 Sep 22 23:21:44.514 DEBG Write :1004 deps:[] res:true
27779 Sep 22 23:21:44.530 DEBG Write :1004 deps:[] res:true
27780 Sep 22 23:21:44.536 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27781 Sep 22 23:21:44.536 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27782 Sep 22 23:21:44.536 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27783 Sep 22 23:21:44.546 INFO UUID: d89769b3-2748-4a20-ad6b-6b02a9292676
27784 Sep 22 23:21:44.546 INFO Blocks per extent:512 Total Extents: 188
27785 Sep 22 23:21:44.546 INFO Crucible Version: Crucible Version: 0.0.1
27786 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27787 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27788 rustc: 1.70.0 stable x86_64-unknown-illumos
27789 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27790 Sep 22 23:21:44.546 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27791 Sep 22 23:21:44.546 INFO Using address: 127.0.0.1:62519, task: main
27792 Sep 22 23:21:44.546 INFO Repair listens on 127.0.0.1:0, task: repair
27793 Sep 22 23:21:44.546 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46997, task: repair
27794 Sep 22 23:21:44.546 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46997, task: repair
27795 Sep 22 23:21:44.546 INFO listening, local_addr: 127.0.0.1:46997, task: repair
27796 Sep 22 23:21:44.546 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46997, task: repair
27797 Sep 22 23:21:44.546 INFO Using repair address: 127.0.0.1:46997, task: main
27798 Sep 22 23:21:44.546 INFO No SSL acceptor configured, task: main
27799 Sep 22 23:21:44.555 INFO Upstairs starts
27800 Sep 22 23:21:44.555 INFO Crucible Version: BuildInfo {
27801 version: "0.0.1",
27802 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27803 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27804 git_branch: "main",
27805 rustc_semver: "1.70.0",
27806 rustc_channel: "stable",
27807 rustc_host_triple: "x86_64-unknown-illumos",
27808 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27809 cargo_triple: "x86_64-unknown-illumos",
27810 debug: true,
27811 opt_level: 0,
27812 }
27813 Sep 22 23:21:44.555 INFO Upstairs <-> Downstairs Message Version: 4
27814 Sep 22 23:21:44.555 INFO Crucible stats registered with UUID: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b
27815 Sep 22 23:21:44.555 INFO Crucible 71f26a6c-b7c6-4f58-901b-ae4e36b1710b has session id: ca0bfa41-5904-420a-8eaa-b31e12bc4c5a
27816 Sep 22 23:21:44.555 INFO listening on 127.0.0.1:0, task: main
27817 Sep 22 23:21:44.555 INFO [0] connecting to 127.0.0.1:63038, looper: 0
27818 Sep 22 23:21:44.555 INFO [1] connecting to 127.0.0.1:50898, looper: 1
27819 Sep 22 23:21:44.555 INFO [2] connecting to 127.0.0.1:62519, looper: 2
27820 Sep 22 23:21:44.555 INFO up_listen starts, task: up_listen
27821 Sep 22 23:21:44.555 INFO Wait for all three downstairs to come online
27822 Sep 22 23:21:44.555 INFO Flush timeout: 0.5
27823 Sep 22 23:21:44.556 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected, looper: 0
27824 Sep 22 23:21:44.556 INFO [0] Proc runs for 127.0.0.1:63038 in state New
27825 Sep 22 23:21:44.556 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected, looper: 1
27826 Sep 22 23:21:44.556 INFO [1] Proc runs for 127.0.0.1:50898 in state New
27827 Sep 22 23:21:44.556 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected, looper: 2
27828 Sep 22 23:21:44.556 INFO [2] Proc runs for 127.0.0.1:62519 in state New
27829 Sep 22 23:21:44.556 INFO accepted connection from 127.0.0.1:40139, task: main
27830 Sep 22 23:21:44.556 INFO accepted connection from 127.0.0.1:42516, task: main
27831 Sep 22 23:21:44.556 INFO accepted connection from 127.0.0.1:55292, task: main
27832 Sep 22 23:21:44.619 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
27833 Sep 22 23:21:44.619 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } connected, version 4, task: proc
27834 Sep 22 23:21:44.619 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
27835 Sep 22 23:21:44.619 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } connected, version 4, task: proc
27836 Sep 22 23:21:44.620 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
27837 Sep 22 23:21:44.620 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } connected, version 4, task: proc
27838 Sep 22 23:21:44.620 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) New New New ds_transition to WaitActive
27839 Sep 22 23:21:44.620 INFO [0] Transition from New to WaitActive
27840 Sep 22 23:21:44.620 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitActive New New ds_transition to WaitActive
27841 Sep 22 23:21:44.620 INFO [1] Transition from New to WaitActive
27842 Sep 22 23:21:44.620 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitActive WaitActive New ds_transition to WaitActive
27843 Sep 22 23:21:44.620 INFO [2] Transition from New to WaitActive
27844 Sep 22 23:21:44.663 DEBG IO Write 1011 has deps [JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
27845 Sep 22 23:21:44.664 DEBG up_ds_listen was notified
27846 Sep 22 23:21:44.664 DEBG up_ds_listen process 1011
27847 Sep 22 23:21:44.664 DEBG [A] ack job 1011:12, : downstairs
27848 Sep 22 23:21:44.664 DEBG up_ds_listen checked 1 jobs, back to waiting
27849 Sep 22 23:21:44.914 DEBG Write :1006 deps:[] res:true
27850 The guest has requested activation
27851 Sep 22 23:21:44.917 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b active request set
27852 Sep 22 23:21:44.917 INFO [0] received activate with gen 1
27853 Sep 22 23:21:44.917 INFO [0] client got ds_active_rx, promote! session 14046fa9-5850-443d-9708-a7d5d0130e56
27854 Sep 22 23:21:44.917 INFO [1] received activate with gen 1
27855 Sep 22 23:21:44.917 INFO [1] client got ds_active_rx, promote! session 14046fa9-5850-443d-9708-a7d5d0130e56
27856 Sep 22 23:21:44.918 INFO [2] received activate with gen 1
27857 Sep 22 23:21:44.918 INFO [2] client got ds_active_rx, promote! session 14046fa9-5850-443d-9708-a7d5d0130e56
27858 Sep 22 23:21:44.918 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } is now active (read-write)
27859 Sep 22 23:21:44.918 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } is now active (read-write)
27860 Sep 22 23:21:44.918 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } is now active (read-write)
27861 Sep 22 23:21:44.919 INFO [0] downstairs client at 127.0.0.1:63038 has UUID 0c4b6300-aa56-4191-90c9-1d56da8be939
27862 Sep 22 23:21:44.919 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 0c4b6300-aa56-4191-90c9-1d56da8be939, encrypted: true, database_read_version: 1, database_write_version: 1 }
27863 Sep 22 23:21:44.919 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
27864 Sep 22 23:21:44.919 INFO [1] downstairs client at 127.0.0.1:50898 has UUID 77625cc5-cf57-4c89-b7d2-c29dca5216c1
27865 Sep 22 23:21:44.919 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 77625cc5-cf57-4c89-b7d2-c29dca5216c1, encrypted: true, database_read_version: 1, database_write_version: 1 }
27866 Sep 22 23:21:44.919 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
27867 Sep 22 23:21:44.919 INFO [2] downstairs client at 127.0.0.1:62519 has UUID d89769b3-2748-4a20-ad6b-6b02a9292676
27868 Sep 22 23:21:44.919 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: d89769b3-2748-4a20-ad6b-6b02a9292676, encrypted: true, database_read_version: 1, database_write_version: 1 }
27869 Sep 22 23:21:44.919 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
27870 Sep 22 23:21:44.927 DEBG Write :1006 deps:[] res:true
27871 Sep 22 23:21:44.931 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27872 Sep 22 23:21:44.932 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27873 Sep 22 23:21:44.934 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27874 Sep 22 23:21:44.939 DEBG Write :1006 deps:[] res:true
27875 Sep 22 23:21:44.943 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27876 Sep 22 23:21:44.943 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27877 Sep 22 23:21:44.943 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
278782023-09-22T23:21:44.947ZINFOcrucible-pantry (dropshot): request completed latency_us = 224 local_addr = 127.0.0.1:52393 method = GET remote_addr = 127.0.0.1:57584 req_id = ec4c93f1-d209-413c-87f1-28c3dc8512e4 response_code = 200 uri = /crucible/pantry/0/job/7165c87e-0953-466c-822f-e26bef4d9d7d/is_finished
278792023-09-22T23:21:44.947ZERROcrucible-pantry (datafile): job 7165c87e-0953-466c-822f-e26bef4d9d7d failed with sha256 digest mismatch! expected 00000000000000000000000000000000000000000000000000000000f5b32221, saw 319d678f093c43502ca360911d52b475dea7fa6dcd962150c84fff18f5b32221
278802023-09-22T23:21:44.947ZINFOcrucible-pantry (dropshot): request completed latency_us = 210 local_addr = 127.0.0.1:52393 method = GET remote_addr = 127.0.0.1:57584 req_id = e1071ba7-2d11-4082-8795-0fd6a9b8d245 response_code = 200 uri = /crucible/pantry/0/job/7165c87e-0953-466c-822f-e26bef4d9d7d/ok
278812023-09-22T23:21:44.948ZINFOcrucible-pantry (datafile): detach removing entry for volume 3251378f-ea13-40c9-9d37-7f38d07fbf35
278822023-09-22T23:21:44.948ZINFOcrucible-pantry (datafile): detaching volume 3251378f-ea13-40c9-9d37-7f38d07fbf35
27883 Sep 22 23:21:44.948 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27884 Sep 22 23:21:44.948 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27885 Sep 22 23:21:44.948 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
278862023-09-22T23:21:44.949ZINFOcrucible-pantry (datafile): Request to deactivate this guest
278872023-09-22T23:21:44.949ZINFOcrucible-pantry (datafile): cf566b9e-aff0-4665-9031-98e179d159a4 set deactivating.
278882023-09-22T23:21:44.949ZINFOcrucible-pantry (dropshot): request completed latency_us = 1337 local_addr = 127.0.0.1:52393 method = DELETE remote_addr = 127.0.0.1:57584 req_id = 4837a099-cd81-4a19-9262-885c3151a9f6 response_code = 204 uri = /crucible/pantry/0/volume/3251378f-ea13-40c9-9d37-7f38d07fbf35
27889 Sep 22 23:21:44.951 INFO Downstairs has completed Negotiation, task: proc
27890 Sep 22 23:21:44.952 INFO Downstairs has completed Negotiation, task: proc
27891 Sep 22 23:21:44.953 INFO Downstairs has completed Negotiation, task: proc
27892 Sep 22 23:21:44.953 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
27893 Sep 22 23:21:44.953 INFO [0] Transition from WaitActive to WaitQuorum
27894 Sep 22 23:21:44.953 WARN [0] new RM replaced this: None
27895 Sep 22 23:21:44.953 INFO [0] Starts reconcile loop
27896 Sep 22 23:21:44.954 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
27897 Sep 22 23:21:44.954 INFO [1] Transition from WaitActive to WaitQuorum
27898 Sep 22 23:21:44.954 WARN [1] new RM replaced this: None
27899 Sep 22 23:21:44.954 INFO [1] Starts reconcile loop
27900 Sep 22 23:21:44.954 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
27901 Sep 22 23:21:44.954 INFO [2] Transition from WaitActive to WaitQuorum
27902 Sep 22 23:21:44.954 WARN [2] new RM replaced this: None
27903 Sep 22 23:21:44.954 INFO [2] Starts reconcile loop
27904 Sep 22 23:21:44.954 INFO [0] 127.0.0.1:63038 task reports connection:true
27905 Sep 22 23:21:44.954 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitQuorum WaitQuorum WaitQuorum
27906 Sep 22 23:21:44.954 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27907 Sep 22 23:21:44.954 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27908 Sep 22 23:21:44.954 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
27909 Sep 22 23:21:44.954 INFO [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27910 Sep 22 23:21:44.954 INFO [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27911 Sep 22 23:21:44.954 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
27912 Sep 22 23:21:44.954 INFO [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27913 Sep 22 23:21:44.954 INFO [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27914 Sep 22 23:21:44.954 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
27915 Sep 22 23:21:44.954 INFO Max found gen is 1
27916 Sep 22 23:21:44.954 INFO Generation requested: 1 >= found:1
27917 Sep 22 23:21:44.954 INFO Next flush: 1
27918 Sep 22 23:21:44.954 INFO All extents match
27919 Sep 22 23:21:44.954 INFO No downstairs repair required
27920 Sep 22 23:21:44.954 INFO No initial repair work was required
27921 Sep 22 23:21:44.954 INFO Set Downstairs and Upstairs active
27922 Sep 22 23:21:44.954 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b is now active with session: 14046fa9-5850-443d-9708-a7d5d0130e56
27923 Sep 22 23:21:44.954 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Set Active after no repair
27924 Sep 22 23:21:44.954 INFO Notify all downstairs, region set compare is done.
27925 Sep 22 23:21:44.954 INFO Set check for repair
27926 Sep 22 23:21:44.954 INFO [1] 127.0.0.1:50898 task reports connection:true
27927 Sep 22 23:21:44.954 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Active Active Active
27928 Sep 22 23:21:44.954 INFO Set check for repair
27929 Sep 22 23:21:44.954 INFO [2] 127.0.0.1:62519 task reports connection:true
27930 Sep 22 23:21:44.954 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Active Active Active
27931 Sep 22 23:21:44.954 INFO Set check for repair
27932 Sep 22 23:21:44.954 INFO [0] received reconcile message
27933 Sep 22 23:21:44.954 INFO [0] All repairs completed, exit
27934 Sep 22 23:21:44.954 INFO [0] Starts cmd_loop
27935 Sep 22 23:21:44.955 INFO [1] received reconcile message
27936 Sep 22 23:21:44.955 INFO [1] All repairs completed, exit
27937 Sep 22 23:21:44.955 INFO [1] Starts cmd_loop
27938 Sep 22 23:21:44.955 INFO [2] received reconcile message
27939 Sep 22 23:21:44.955 INFO [2] All repairs completed, exit
27940 Sep 22 23:21:44.955 INFO [2] Starts cmd_loop
27941 The guest has finished waiting for activation
27942 Sep 22 23:21:44.976 DEBG IO Read 1000 has deps []
27943 Sep 22 23:21:44.999 DEBG IO Write 1012 has deps [JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
27944 Sep 22 23:21:45.000 DEBG up_ds_listen was notified
27945 Sep 22 23:21:45.000 DEBG up_ds_listen process 1012
27946 Sep 22 23:21:45.000 DEBG [A] ack job 1012:13, : downstairs
27947 Sep 22 23:21:45.000 DEBG up_ds_listen checked 1 jobs, back to waiting
27948 Sep 22 23:21:45.000 DEBG IO Flush 1013 has deps [JobId(1012), JobId(1011), JobId(1010)]
27949 Sep 22 23:21:45.007 DEBG Read :1000 deps:[] res:true
27950 Sep 22 23:21:45.036 DEBG Read :1000 deps:[] res:true
27951 Sep 22 23:21:45.042 INFO Upstairs starts
27952 Sep 22 23:21:45.042 INFO Crucible Version: BuildInfo {
27953 version: "0.0.1",
27954 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27955 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27956 git_branch: "main",
27957 rustc_semver: "1.70.0",
27958 rustc_channel: "stable",
27959 rustc_host_triple: "x86_64-unknown-illumos",
27960 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27961 cargo_triple: "x86_64-unknown-illumos",
27962 debug: true,
27963 opt_level: 0,
27964 }
27965 Sep 22 23:21:45.042 INFO Upstairs <-> Downstairs Message Version: 4
27966 Sep 22 23:21:45.042 INFO Crucible stats registered with UUID: 40e06af7-e69f-43e2-974b-bb00dc5e8960
27967 Sep 22 23:21:45.042 INFO Crucible 40e06af7-e69f-43e2-974b-bb00dc5e8960 has session id: 82d5f8f4-faa4-453f-a84d-0d23406d6cd8
27968 Sep 22 23:21:45.043 INFO [0] connecting to 127.0.0.1:55664, looper: 0
27969 Sep 22 23:21:45.043 INFO [1] connecting to 127.0.0.1:37034, looper: 1
27970 Sep 22 23:21:45.043 INFO [2] connecting to 127.0.0.1:53040, looper: 2
27971 Sep 22 23:21:45.043 INFO up_listen starts, task: up_listen
27972 Sep 22 23:21:45.043 INFO Wait for all three downstairs to come online
27973 Sep 22 23:21:45.043 INFO Flush timeout: 0.5
27974 Sep 22 23:21:45.043 INFO [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected, looper: 0
27975 Sep 22 23:21:45.043 INFO [0] Proc runs for 127.0.0.1:55664 in state New
27976 Sep 22 23:21:45.044 INFO [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected, looper: 1
27977 Sep 22 23:21:45.044 INFO [1] Proc runs for 127.0.0.1:37034 in state New
27978 Sep 22 23:21:45.044 INFO [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected, looper: 2
27979 Sep 22 23:21:45.044 INFO [2] Proc runs for 127.0.0.1:53040 in state New
27980 Sep 22 23:21:45.044 INFO accepted connection from 127.0.0.1:47023, task: main
27981 Sep 22 23:21:45.044 INFO accepted connection from 127.0.0.1:39586, task: main
27982 Sep 22 23:21:45.044 INFO accepted connection from 127.0.0.1:54462, task: main
27983 Sep 22 23:21:45.045 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
27984 Sep 22 23:21:45.045 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } connected, version 4, task: proc
27985 Sep 22 23:21:45.045 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
27986 Sep 22 23:21:45.045 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } connected, version 4, task: proc
27987 Sep 22 23:21:45.045 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
27988 Sep 22 23:21:45.045 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } connected, version 4, task: proc
27989 Sep 22 23:21:45.045 INFO [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (f7ed2bae-5c71-405b-9355-9393f8d7847d) New New New ds_transition to WaitActive
27990 Sep 22 23:21:45.045 INFO [0] Transition from New to WaitActive
27991 Sep 22 23:21:45.045 INFO [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (f7ed2bae-5c71-405b-9355-9393f8d7847d) WaitActive New New ds_transition to WaitActive
27992 Sep 22 23:21:45.045 INFO [1] Transition from New to WaitActive
27993 Sep 22 23:21:45.045 INFO [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (f7ed2bae-5c71-405b-9355-9393f8d7847d) WaitActive WaitActive New ds_transition to WaitActive
27994 Sep 22 23:21:45.045 INFO [2] Transition from New to WaitActive
27995 The guest has requested activation
27996 Sep 22 23:21:45.046 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 active request set
27997 Sep 22 23:21:45.046 INFO [0] received activate with gen 2
27998 Sep 22 23:21:45.046 INFO [0] client got ds_active_rx, promote! session f7ed2bae-5c71-405b-9355-9393f8d7847d
27999 Sep 22 23:21:45.046 INFO [1] received activate with gen 2
28000 Sep 22 23:21:45.046 INFO [1] client got ds_active_rx, promote! session f7ed2bae-5c71-405b-9355-9393f8d7847d
28001 Sep 22 23:21:45.046 INFO [2] received activate with gen 2
28002 Sep 22 23:21:45.046 INFO [2] client got ds_active_rx, promote! session f7ed2bae-5c71-405b-9355-9393f8d7847d
28003 Sep 22 23:21:45.046 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } to UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 }
28004 Sep 22 23:21:45.046 WARN Signaling to UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } thread that UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } is being promoted (read-write)
28005 Sep 22 23:21:45.046 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } to UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 }
28006 Sep 22 23:21:45.046 WARN Signaling to UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } thread that UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } is being promoted (read-write)
28007 Sep 22 23:21:45.047 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } to UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 }
28008 Sep 22 23:21:45.047 WARN Signaling to UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } thread that UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } is being promoted (read-write)
28009 Sep 22 23:21:45.047 WARN Another upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 }, task: main
28010 Sep 22 23:21:45.047 INFO UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } is now active (read-write)
28011 Sep 22 23:21:45.047 WARN Another upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 }, task: main
28012 Sep 22 23:21:45.047 INFO UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } is now active (read-write)
28013 Sep 22 23:21:45.047 WARN Another upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 }, task: main
28014 Sep 22 23:21:45.048 INFO UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: f7ed2bae-5c71-405b-9355-9393f8d7847d, gen: 2 } is now active (read-write)
28015 Sep 22 23:21:45.048 INFO connection (127.0.0.1:49946): all done
28016 Sep 22 23:21:45.048 INFO connection (127.0.0.1:45897): all done
28017 Sep 22 23:21:45.048 INFO connection (127.0.0.1:36645): all done
280182023-09-22T23:21:45.048ZERROcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) cmd_loop saw YouAreNoLongerActive 40e06af7-e69f-43e2-974b-bb00dc5e8960 f7ed2bae-5c71-405b-9355-9393f8d7847d 2
280192023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) Active Active Active ds_transition to Disabled
280202023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
280212023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 set inactive, session d90ddc5f-b441-432b-bb73-5ab29b73ba5b
280222023-09-22T23:21:45.049ZERROcrucible-pantry (datafile): 127.0.0.1:55664: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 0
280232023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 Gone missing, transition from Disabled to Disconnected
280242023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 connection to 127.0.0.1:55664 closed looper = 0
280252023-09-22T23:21:45.049ZERROcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) cmd_loop saw YouAreNoLongerActive 40e06af7-e69f-43e2-974b-bb00dc5e8960 f7ed2bae-5c71-405b-9355-9393f8d7847d 2
280262023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) Disconnected Active Active ds_transition to Disabled
280272023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
280282023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 set inactive, session d90ddc5f-b441-432b-bb73-5ab29b73ba5b
280292023-09-22T23:21:45.049ZERROcrucible-pantry (datafile): 127.0.0.1:37034: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 1
280302023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 Gone missing, transition from Disabled to Disconnected
280312023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 connection to 127.0.0.1:37034 closed looper = 1
280322023-09-22T23:21:45.049ZERROcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) cmd_loop saw YouAreNoLongerActive 40e06af7-e69f-43e2-974b-bb00dc5e8960 f7ed2bae-5c71-405b-9355-9393f8d7847d 2
280332023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) Disconnected Disconnected Active ds_transition to Disabled
280342023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
280352023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 set inactive, session d90ddc5f-b441-432b-bb73-5ab29b73ba5b
28036 Sep 22 23:21:45.049 INFO [0] downstairs client at 127.0.0.1:55664 has UUID b740b481-b810-4686-a6ad-5e7a129e9669
280372023-09-22T23:21:45.049ZERROcrucible-pantry (datafile): 127.0.0.1:53040: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 2
28038 Sep 22 23:21:45.049 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b740b481-b810-4686-a6ad-5e7a129e9669, encrypted: true, database_read_version: 1, database_write_version: 1 }
280392023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 Gone missing, transition from Disabled to Disconnected
28040 Sep 22 23:21:45.049 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitActive WaitActive WaitActive
280412023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 connection to 127.0.0.1:53040 closed looper = 2
280422023-09-22T23:21:45.049ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
28043 Sep 22 23:21:45.049 INFO [1] downstairs client at 127.0.0.1:37034 has UUID efa5c554-210a-4196-815d-fa3d1dfbb858
28044 Sep 22 23:21:45.049 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: efa5c554-210a-4196-815d-fa3d1dfbb858, encrypted: true, database_read_version: 1, database_write_version: 1 }
280452023-09-22T23:21:45.049ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:55664 task reports connection:false
28046 Sep 22 23:21:45.050 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitActive WaitActive WaitActive
280472023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 Disconnected Disconnected Disconnected
280482023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:55664 task reports offline
28049 Sep 22 23:21:45.050 INFO [2] downstairs client at 127.0.0.1:53040 has UUID e1ba5200-2113-47b5-8eab-dc553c509bb6
28050 Sep 22 23:21:45.050 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: e1ba5200-2113-47b5-8eab-dc553c509bb6, encrypted: true, database_read_version: 1, database_write_version: 1 }
280512023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:37034 task reports connection:false
28052 Sep 22 23:21:45.050 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitActive WaitActive WaitActive
280532023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 Disconnected Disconnected Disconnected
280542023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:37034 task reports offline
280552023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:53040 task reports connection:false
280562023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): 40e06af7-e69f-43e2-974b-bb00dc5e8960 Disconnected Disconnected Disconnected
280572023-09-22T23:21:45.050ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:53040 task reports offline
280582023-09-22T23:21:45.050ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
280592023-09-22T23:21:45.050ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
28060 test test::test_pantry_import_from_url_ovmf_bad_digest ... ok
28061 Sep 22 23:21:45.064 INFO Current flush_numbers [0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28062 Sep 22 23:21:45.066 INFO Current flush_numbers [0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28063 Sep 22 23:21:45.067 INFO Current flush_numbers [0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28064 Sep 22 23:21:45.072 DEBG Read :1000 deps:[] res:true
28065 Sep 22 23:21:45.087 INFO Downstairs has completed Negotiation, task: proc
28066 Sep 22 23:21:45.089 INFO Downstairs has completed Negotiation, task: proc
28067 Sep 22 23:21:45.090 INFO Downstairs has completed Negotiation, task: proc
28068 Sep 22 23:21:45.091 INFO [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (f7ed2bae-5c71-405b-9355-9393f8d7847d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
28069 Sep 22 23:21:45.091 INFO [0] Transition from WaitActive to WaitQuorum
28070 Sep 22 23:21:45.091 WARN [0] new RM replaced this: None
28071 Sep 22 23:21:45.091 INFO [0] Starts reconcile loop
28072 Sep 22 23:21:45.091 INFO [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (f7ed2bae-5c71-405b-9355-9393f8d7847d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
28073 Sep 22 23:21:45.091 INFO [1] Transition from WaitActive to WaitQuorum
28074 Sep 22 23:21:45.091 WARN [1] new RM replaced this: None
28075 Sep 22 23:21:45.091 INFO [1] Starts reconcile loop
28076 Sep 22 23:21:45.091 INFO [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (f7ed2bae-5c71-405b-9355-9393f8d7847d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
28077 Sep 22 23:21:45.091 INFO [2] Transition from WaitActive to WaitQuorum
28078 Sep 22 23:21:45.091 WARN [2] new RM replaced this: None
28079 Sep 22 23:21:45.092 INFO [2] Starts reconcile loop
28080 Sep 22 23:21:45.092 INFO [0] 127.0.0.1:55664 task reports connection:true
28081 Sep 22 23:21:45.092 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 WaitQuorum WaitQuorum WaitQuorum
28082 Sep 22 23:21:45.092 INFO [0]R flush_numbers[0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28083 Sep 22 23:21:45.092 INFO [0]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
28084 Sep 22 23:21:45.092 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
28085 Sep 22 23:21:45.092 INFO [1]R flush_numbers[0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28086 Sep 22 23:21:45.092 INFO [1]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
28087 Sep 22 23:21:45.092 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
28088 Sep 22 23:21:45.092 INFO [2]R flush_numbers[0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28089 Sep 22 23:21:45.092 INFO [2]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
28090 Sep 22 23:21:45.092 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
28091 Sep 22 23:21:45.092 INFO Max found gen is 2
28092 Sep 22 23:21:45.092 INFO Generation requested: 2 >= found:2
28093 Sep 22 23:21:45.092 INFO Next flush: 5
28094 Sep 22 23:21:45.092 INFO All extents match
28095 Sep 22 23:21:45.092 INFO No downstairs repair required
28096 Sep 22 23:21:45.092 INFO No initial repair work was required
28097 Sep 22 23:21:45.092 INFO Set Downstairs and Upstairs active
28098 Sep 22 23:21:45.092 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 is now active with session: f7ed2bae-5c71-405b-9355-9393f8d7847d
28099 Sep 22 23:21:45.092 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 Set Active after no repair
28100 Sep 22 23:21:45.092 INFO Notify all downstairs, region set compare is done.
28101 Sep 22 23:21:45.092 INFO Set check for repair
28102 Sep 22 23:21:45.092 INFO [1] 127.0.0.1:37034 task reports connection:true
28103 Sep 22 23:21:45.092 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 Active Active Active
28104 Sep 22 23:21:45.092 INFO Set check for repair
28105 Sep 22 23:21:45.092 INFO [2] 127.0.0.1:53040 task reports connection:true
28106 Sep 22 23:21:45.092 INFO 40e06af7-e69f-43e2-974b-bb00dc5e8960 Active Active Active
28107 Sep 22 23:21:45.092 INFO Set check for repair
28108 Sep 22 23:21:45.092 INFO [0] received reconcile message
28109 Sep 22 23:21:45.093 INFO [0] All repairs completed, exit
28110 Sep 22 23:21:45.093 INFO [0] Starts cmd_loop
28111 Sep 22 23:21:45.093 INFO [1] received reconcile message
28112 Sep 22 23:21:45.093 INFO [1] All repairs completed, exit
28113 Sep 22 23:21:45.093 INFO [1] Starts cmd_loop
28114 Sep 22 23:21:45.093 INFO [2] received reconcile message
28115 Sep 22 23:21:45.093 INFO [2] All repairs completed, exit
28116 Sep 22 23:21:45.093 INFO [2] Starts cmd_loop
28117 The guest has finished waiting for activation
28118 Sep 22 23:21:45.098 DEBG IO Flush 1001 has deps [JobId(1000)]
28119 Sep 22 23:21:45.102 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
28120 Sep 22 23:21:45.103 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
28121 Sep 22 23:21:45.104 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
28122 Sep 22 23:21:45.105 DEBG IO Read 1000 has deps []
28123 Sep 22 23:21:45.139 DEBG Read :1000 deps:[] res:true
28124 Sep 22 23:21:45.202 DEBG Read :1000 deps:[] res:true
28125 Sep 22 23:21:45.269 DEBG Read :1000 deps:[] res:true
28126 Sep 22 23:21:45.325 DEBG [0] Read AckReady 1000, : downstairs
28127 Sep 22 23:21:45.363 DEBG [1] Read already AckReady 1000, : downstairs
28128 Sep 22 23:21:45.401 DEBG [2] Read already AckReady 1000, : downstairs
28129 Sep 22 23:21:45.402 DEBG up_ds_listen was notified
28130 Sep 22 23:21:45.403 DEBG up_ds_listen process 1000
28131 Sep 22 23:21:45.403 DEBG [A] ack job 1000:1, : downstairs
28132 Sep 22 23:21:45.404 DEBG IO Write 1014 has deps [JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
28133 Sep 22 23:21:45.404 DEBG up_ds_listen was notified
28134 Sep 22 23:21:45.404 DEBG up_ds_listen process 1014
28135 Sep 22 23:21:45.404 DEBG [A] ack job 1014:15, : downstairs
28136 Sep 22 23:21:45.404 DEBG up_ds_listen checked 1 jobs, back to waiting
28137 Sep 22 23:21:45.501 DEBG up_ds_listen process 1001
28138 Sep 22 23:21:45.501 DEBG [A] ack job 1001:2, : downstairs
28139 Sep 22 23:21:45.501 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
28140 Sep 22 23:21:45.501 DEBG up_ds_listen checked 2 jobs, back to waiting
28141 Sep 22 23:21:45.501 DEBG up_ds_listen was notified
28142 Sep 22 23:21:45.501 DEBG up_ds_listen checked 0 jobs, back to waiting
28143 Sep 22 23:21:45.550 WARN returning error on write!
28144 Sep 22 23:21:45.550 DEBG Write :1000 deps:[] res:false
28145 Sep 22 23:21:45.551 WARN returning error on write!
28146 Sep 22 23:21:45.551 DEBG Write :1000 deps:[] res:false
28147 Sep 22 23:21:45.581 DEBG Write :1000 deps:[] res:true
28148 Sep 22 23:21:45.612 DEBG Write :1000 deps:[] res:true
28149 Sep 22 23:21:45.613 INFO [lossy] skipping 1001
28150 Sep 22 23:21:45.613 INFO [lossy] skipping 1001
28151 Sep 22 23:21:45.614 INFO [lossy] skipping 1000
28152 Sep 22 23:21:45.615 WARN returning error on write!
28153 Sep 22 23:21:45.615 DEBG Write :1000 deps:[] res:false
28154 Sep 22 23:21:45.615 INFO [lossy] skipping 1000
28155 Sep 22 23:21:45.616 WARN returning error on write!
28156 Sep 22 23:21:45.616 DEBG Write :1000 deps:[] res:false
28157 Sep 22 23:21:45.646 DEBG Write :1000 deps:[] res:true
28158 Sep 22 23:21:45.656 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
28159 Sep 22 23:21:45.656 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
28160 Sep 22 23:21:45.657 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
28161 Sep 22 23:21:45.956 INFO Checking if live repair is needed
28162 Sep 22 23:21:45.956 INFO No Live Repair required at this time
28163 Sep 22 23:21:45.989 DEBG IO Write 1015 has deps [JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
28164 Sep 22 23:21:45.989 DEBG IO Flush 1016 has deps [JobId(1015), JobId(1014), JobId(1013)]
28165 Sep 22 23:21:45.989 DEBG up_ds_listen was notified
28166 Sep 22 23:21:45.989 DEBG up_ds_listen process 1015
28167 Sep 22 23:21:45.989 DEBG [A] ack job 1015:16, : downstairs
28168 Sep 22 23:21:45.989 DEBG up_ds_listen checked 1 jobs, back to waiting
28169 Sep 22 23:21:46.229 DEBG [0] Read AckReady 1000, : downstairs
28170 Sep 22 23:21:46.319 DEBG IO Write 1017 has deps [JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
28171 Sep 22 23:21:46.319 DEBG up_ds_listen was notified
28172 Sep 22 23:21:46.320 DEBG up_ds_listen process 1017
28173 Sep 22 23:21:46.320 DEBG [A] ack job 1017:18, : downstairs
28174 Sep 22 23:21:46.320 DEBG up_ds_listen checked 1 jobs, back to waiting
28175 Sep 22 23:21:46.655 DEBG IO Write 1018 has deps [JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
28176 Sep 22 23:21:46.655 DEBG up_ds_listen was notified
28177 Sep 22 23:21:46.655 DEBG up_ds_listen process 1018
28178 Sep 22 23:21:46.655 DEBG [A] ack job 1018:19, : downstairs
28179 Sep 22 23:21:46.655 DEBG up_ds_listen checked 1 jobs, back to waiting
28180 Sep 22 23:21:46.656 DEBG IO Flush 1019 has deps [JobId(1018), JobId(1017), JobId(1016)]
28181 Sep 22 23:21:46.857 DEBG [1] Read already AckReady 1000, : downstairs
28182 Sep 22 23:21:46.983 INFO Request to deactivate this guest
28183 Sep 22 23:21:46.983 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set deactivating.
28184 Sep 22 23:21:46.983 DEBG No work, no need to flush, return OK
28185 note: configured to log to "/dev/stdout"
281862023-09-22T23:21:46.984ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:49824
281872023-09-22T23:21:46.985ZINFOcrucible-pantry: listen IP: 127.0.0.1:49824
28188 Sep 22 23:21:46.991 DEBG IO Write 1020 has deps [JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
28189 Sep 22 23:21:46.992 DEBG up_ds_listen was notified
28190 Sep 22 23:21:46.992 DEBG up_ds_listen process 1020
28191 Sep 22 23:21:46.992 DEBG [A] ack job 1020:21, : downstairs
28192 Sep 22 23:21:46.992 DEBG up_ds_listen checked 1 jobs, back to waiting
281932023-09-22T23:21:47.054ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:49824 remote_addr = 127.0.0.1:33149
281942023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): no entry exists for volume 3dd77b9b-1720-4c07-b5d9-e4b24cceb521, constructing...
281952023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Upstairs starts
281962023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
281972023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
281982023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b
281992023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Crucible 71f26a6c-b7c6-4f58-901b-ae4e36b1710b has session id: 9be27365-d3f4-4c51-a131-3bf3779747df
282002023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:63038 looper = 0
282012023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:50898 looper = 1
282022023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:62519 looper = 2
282032023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
282042023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
282052023-09-22T23:21:47.055ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
282062023-09-22T23:21:47.119ZINFOcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected looper = 0
282072023-09-22T23:21:47.119ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:63038 in state New
282082023-09-22T23:21:47.119ZINFOcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected looper = 1
282092023-09-22T23:21:47.119ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:50898 in state New
282102023-09-22T23:21:47.119ZINFOcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected looper = 2
28211 Sep 22 23:21:47.119 INFO accepted connection from 127.0.0.1:50361, task: main
282122023-09-22T23:21:47.119ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:62519 in state New
28213 Sep 22 23:21:47.119 INFO accepted connection from 127.0.0.1:56375, task: main
28214 Sep 22 23:21:47.120 INFO accepted connection from 127.0.0.1:64643, task: main
28215 Sep 22 23:21:47.120 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
28216 Sep 22 23:21:47.120 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } connected, version 4, task: proc
28217 Sep 22 23:21:47.120 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
28218 Sep 22 23:21:47.120 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } connected, version 4, task: proc
28219 Sep 22 23:21:47.120 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
28220 Sep 22 23:21:47.120 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } connected, version 4, task: proc
282212023-09-22T23:21:47.120ZINFOcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) New New New ds_transition to WaitActive
282222023-09-22T23:21:47.120ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
282232023-09-22T23:21:47.120ZINFOcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) WaitActive New New ds_transition to WaitActive
282242023-09-22T23:21:47.120ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
282252023-09-22T23:21:47.120ZINFOcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) WaitActive WaitActive New ds_transition to WaitActive
282262023-09-22T23:21:47.120ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
28227 Sep 22 23:21:47.325 DEBG IO Write 1021 has deps [JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
28228 Sep 22 23:21:47.325 DEBG up_ds_listen was notified
28229 Sep 22 23:21:47.325 DEBG up_ds_listen process 1021
28230 Sep 22 23:21:47.325 DEBG [A] ack job 1021:22, : downstairs
28231 Sep 22 23:21:47.325 DEBG up_ds_listen checked 1 jobs, back to waiting
28232 Sep 22 23:21:47.326 DEBG IO Flush 1022 has deps [JobId(1021), JobId(1020), JobId(1019)]
282332023-09-22T23:21:47.387ZINFOcrucible-pantry (datafile): volume 3dd77b9b-1720-4c07-b5d9-e4b24cceb521 constructed ok
28234 The guest has requested activation
282352023-09-22T23:21:47.387ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b active request set
282362023-09-22T23:21:47.387ZINFOcrucible-pantry (datafile): [0] received activate with gen 2
282372023-09-22T23:21:47.387ZINFOcrucible-pantry (datafile): [0] client got ds_active_rx, promote! session 3d6550fa-8c01-4792-b110-6653df927fd3
282382023-09-22T23:21:47.387ZINFOcrucible-pantry (datafile): [1] received activate with gen 2
282392023-09-22T23:21:47.387ZINFOcrucible-pantry (datafile): [1] client got ds_active_rx, promote! session 3d6550fa-8c01-4792-b110-6653df927fd3
282402023-09-22T23:21:47.388ZINFOcrucible-pantry (datafile): [2] received activate with gen 2
28241 Sep 22 23:21:47.388 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 }
282422023-09-22T23:21:47.388ZINFOcrucible-pantry (datafile): [2] client got ds_active_rx, promote! session 3d6550fa-8c01-4792-b110-6653df927fd3
28243 Sep 22 23:21:47.388 WARN Signaling to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } thread that UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } is being promoted (read-write)
28244 Sep 22 23:21:47.388 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 }
28245 Sep 22 23:21:47.388 WARN Signaling to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } thread that UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } is being promoted (read-write)
28246 Sep 22 23:21:47.388 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 }
28247 Sep 22 23:21:47.388 WARN Signaling to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } thread that UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } is being promoted (read-write)
28248 Sep 22 23:21:47.388 WARN Another upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 }, task: main
28249 Sep 22 23:21:47.388 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } is now active (read-write)
28250 Sep 22 23:21:47.388 WARN Another upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 }, task: main
28251 Sep 22 23:21:47.389 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } is now active (read-write)
28252 Sep 22 23:21:47.389 WARN Another upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 }, task: main
28253 Sep 22 23:21:47.389 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } is now active (read-write)
28254 Sep 22 23:21:47.389 INFO connection (127.0.0.1:40139): all done
28255 Sep 22 23:21:47.389 INFO connection (127.0.0.1:42516): all done
28256 Sep 22 23:21:47.389 INFO connection (127.0.0.1:55292): all done
28257 Sep 22 23:21:47.389 ERRO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) cmd_loop saw YouAreNoLongerActive 71f26a6c-b7c6-4f58-901b-ae4e36b1710b 3d6550fa-8c01-4792-b110-6653df927fd3 2
28258 Sep 22 23:21:47.389 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) Active Active Active ds_transition to Disabled
28259 Sep 22 23:21:47.389 INFO [0] Transition from Active to Disabled
28260 Sep 22 23:21:47.389 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set inactive, session 14046fa9-5850-443d-9708-a7d5d0130e56
28261 Sep 22 23:21:47.389 ERRO 127.0.0.1:63038: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 0
28262 Sep 22 23:21:47.389 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Gone missing, transition from Disabled to Disconnected
28263 Sep 22 23:21:47.389 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b connection to 127.0.0.1:63038 closed, looper: 0
28264 Sep 22 23:21:47.389 ERRO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) cmd_loop saw YouAreNoLongerActive 71f26a6c-b7c6-4f58-901b-ae4e36b1710b 3d6550fa-8c01-4792-b110-6653df927fd3 2
28265 Sep 22 23:21:47.389 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) Disconnected Active Active ds_transition to Disabled
28266 Sep 22 23:21:47.389 INFO [1] Transition from Active to Disabled
28267 Sep 22 23:21:47.389 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set inactive, session 14046fa9-5850-443d-9708-a7d5d0130e56
28268 Sep 22 23:21:47.389 ERRO 127.0.0.1:50898: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 1
28269 Sep 22 23:21:47.390 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Gone missing, transition from Disabled to Disconnected
28270 Sep 22 23:21:47.390 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b connection to 127.0.0.1:50898 closed, looper: 1
28271 Sep 22 23:21:47.390 ERRO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) cmd_loop saw YouAreNoLongerActive 71f26a6c-b7c6-4f58-901b-ae4e36b1710b 3d6550fa-8c01-4792-b110-6653df927fd3 2
28272 Sep 22 23:21:47.390 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) Disconnected Disconnected Active ds_transition to Disabled
28273 Sep 22 23:21:47.390 INFO [2] Transition from Active to Disabled
28274 Sep 22 23:21:47.390 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set inactive, session 14046fa9-5850-443d-9708-a7d5d0130e56
28275 Sep 22 23:21:47.390 ERRO 127.0.0.1:62519: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 2
28276 Sep 22 23:21:47.390 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Gone missing, transition from Disabled to Disconnected
28277 Sep 22 23:21:47.390 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b connection to 127.0.0.1:62519 closed, looper: 2
28278 Sep 22 23:21:47.390 WARN [0] pm_task rx.recv() is None
28279 Sep 22 23:21:47.390 INFO [0] 127.0.0.1:63038 task reports connection:false
28280 Sep 22 23:21:47.390 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Disconnected Disconnected Disconnected
28281 Sep 22 23:21:47.390 INFO [0] 127.0.0.1:63038 task reports offline
28282 Sep 22 23:21:47.390 INFO [1] 127.0.0.1:50898 task reports connection:false
28283 Sep 22 23:21:47.390 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Disconnected Disconnected Disconnected
28284 Sep 22 23:21:47.390 INFO [1] 127.0.0.1:50898 task reports offline
28285 Sep 22 23:21:47.390 INFO [2] 127.0.0.1:62519 task reports connection:false
28286 Sep 22 23:21:47.390 INFO 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Disconnected Disconnected Disconnected
28287 Sep 22 23:21:47.390 INFO [2] 127.0.0.1:62519 task reports offline
28288 Sep 22 23:21:47.390 WARN [1] pm_task rx.recv() is None
28289 Sep 22 23:21:47.390 WARN [2] pm_task rx.recv() is None
282902023-09-22T23:21:47.390ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:63038 has UUID 0c4b6300-aa56-4191-90c9-1d56da8be939
282912023-09-22T23:21:47.390ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 0c4b6300-aa56-4191-90c9-1d56da8be939, encrypted: true, database_read_version: 1, database_write_version: 1 }
282922023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
282932023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:50898 has UUID 77625cc5-cf57-4c89-b7d2-c29dca5216c1
282942023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 77625cc5-cf57-4c89-b7d2-c29dca5216c1, encrypted: true, database_read_version: 1, database_write_version: 1 }
282952023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
282962023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:62519 has UUID d89769b3-2748-4a20-ad6b-6b02a9292676
282972023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: d89769b3-2748-4a20-ad6b-6b02a9292676, encrypted: true, database_read_version: 1, database_write_version: 1 }
282982023-09-22T23:21:47.391ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
28299 Sep 22 23:21:47.398 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28300 Sep 22 23:21:47.399 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28301 Sep 22 23:21:47.400 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28302 Sep 22 23:21:47.413 INFO Downstairs has completed Negotiation, task: proc
28303 Sep 22 23:21:47.414 INFO Downstairs has completed Negotiation, task: proc
28304 Sep 22 23:21:47.415 INFO Downstairs has completed Negotiation, task: proc
283052023-09-22T23:21:47.415ZINFOcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
283062023-09-22T23:21:47.415ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
283072023-09-22T23:21:47.415ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
283082023-09-22T23:21:47.415ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
283092023-09-22T23:21:47.415ZINFOcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
283102023-09-22T23:21:47.415ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
283112023-09-22T23:21:47.415ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
283122023-09-22T23:21:47.415ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
283132023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
283142023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
283152023-09-22T23:21:47.416ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
283162023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
283172023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:63038 task reports connection:true
283182023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitQuorum WaitQuorum WaitQuorum
283192023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
283202023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
283212023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
283222023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
283232023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28324 The guest has finished waiting for activation
283252023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
283262023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
283272023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
283282023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
283292023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): Max found gen is 1
283302023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): Generation requested: 2 >= found:1
283312023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): Next flush: 1
283322023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): All extents match
283332023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): No downstairs repair required
283342023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): No initial repair work was required
283352023-09-22T23:21:47.416ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
283362023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b is now active with session: 3d6550fa-8c01-4792-b110-6653df927fd3
283372023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Set Active after no repair
283382023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
283392023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): Set check for repair
283402023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:50898 task reports connection:true
283412023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Active Active Active
283422023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): Set check for repair
283432023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:62519 task reports connection:true
283442023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Active Active Active
283452023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): Set check for repair
283462023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [0] received reconcile message
283472023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
283482023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
283492023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [1] received reconcile message
283502023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
283512023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
283522023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [2] received reconcile message
283532023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
283542023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
283552023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): volume 3dd77b9b-1720-4c07-b5d9-e4b24cceb521 activated ok
283562023-09-22T23:21:47.417ZINFOcrucible-pantry (datafile): volume 3dd77b9b-1720-4c07-b5d9-e4b24cceb521 constructed and inserted ok
283572023-09-22T23:21:47.417ZINFOcrucible-pantry (dropshot): request completed latency_us = 362314 local_addr = 127.0.0.1:49824 method = POST remote_addr = 127.0.0.1:33149 req_id = 37378f22-ea19-4ef3-8f88-c6c7f7d630a2 response_code = 200 uri = /crucible/pantry/0/volume/3dd77b9b-1720-4c07-b5d9-e4b24cceb521
283582023-09-22T23:21:47.418ZINFOcrucible-pantry (datafile): Scrub check for 3dd77b9b-1720-4c07-b5d9-e4b24cceb521
283592023-09-22T23:21:47.418ZINFOcrucible-pantry (datafile): Scrub for 3dd77b9b-1720-4c07-b5d9-e4b24cceb521 begins
283602023-09-22T23:21:47.418ZINFOcrucible-pantry (datafile): Scrub with total_size:1966080 block_size:512
283612023-09-22T23:21:47.418ZINFOcrucible-pantry (datafile): Scrubs from block 0 to 3840 in (256) 131072 size IOs pm:0
283622023-09-22T23:21:47.418ZINFOcrucible-pantry (dropshot): request completed latency_us = 638 local_addr = 127.0.0.1:49824 method = POST remote_addr = 127.0.0.1:33149 req_id = b4b9054e-0bf0-4378-b5dd-d72b3ecef36b response_code = 200 uri = /crucible/pantry/0/volume/3dd77b9b-1720-4c07-b5d9-e4b24cceb521/scrub
283632023-09-22T23:21:47.419ZINFOcrucible-pantry (dropshot): request completed latency_us = 184 local_addr = 127.0.0.1:49824 method = GET remote_addr = 127.0.0.1:33149 req_id = 0560e9e5-8850-4ece-87fd-66ef564218e8 response_code = 200 uri = /crucible/pantry/0/job/9890f602-aa95-4bcc-8172-78f624d84944/is_finished
28364 Sep 22 23:21:47.479 DEBG [2] Read already AckReady 1000, : downstairs
28365 Sep 22 23:21:47.484 DEBG up_ds_listen was notified
28366 Sep 22 23:21:47.484 DEBG up_ds_listen process 1000
28367 Sep 22 23:21:47.485 DEBG [A] ack job 1000:1, : downstairs
28368 Sep 22 23:21:47.584 DEBG up_ds_listen checked 1 jobs, back to waiting
28369 0 512 ok
28370 512 1024 ok
28371 1024 1536 ok
28372 1536 2048 ok
28373 2048 2560 ok
28374 2560 3072 ok
28375 3072 3584 ok
28376 3584 4096 ok
28377 4096 4608 ok
28378 4608 5120 ok
28379 5120 5632 ok
28380 5632 6144 ok
28381 6144 6656 ok
28382 6656 7168 ok
28383 7168 7680 ok
28384 7680 8192 ok
28385 8192 8704 ok
28386 8704 9216 ok
28387 9216 9728 ok
28388 9728 10240 ok
28389 10240 10752 ok
28390 10752 11264 ok
28391 11264 11776 ok
28392 11776 12288 ok
28393 12288 12800 ok
28394 12800 13312 ok
28395 13312 13824 ok
28396 13824 14336 ok
28397 14336 14848 ok
28398 14848 15360 ok
28399 15360 15872 ok
28400 15872 16384 ok
28401 16384 16896 ok
28402 16896 17408 ok
28403 17408 17920 ok
28404 17920 18432 ok
28405 18432 18944 ok
28406 18944 19456 ok
28407 19456 19968 ok
28408 19968 20480 ok
28409 20480 20992 ok
28410 20992 21504 ok
28411 21504 22016 ok
28412 22016 22528 ok
28413 22528 23040 ok
28414 23040 23552 ok
28415 23552 24064 ok
28416 24064 24576 ok
28417 24576 25088 ok
28418 25088 25600 ok
28419 25600 26112 ok
28420 26112 26624 ok
28421 26624 27136 ok
28422 27136 27648 ok
28423 27648 28160 ok
28424 28160 28672 ok
28425 28672 29184 ok
28426 29184 29696 ok
28427 29696 30208 ok
28428 30208 30720 ok
28429 30720 31232 ok
28430 31232 31744 ok
28431 31744 32256 ok
28432 32256 32768 ok
28433 32768 33280 ok
28434 33280 33792 ok
28435 33792 34304 ok
28436 34304 34816 ok
28437 34816 35328 ok
28438 35328 35840 ok
28439 35840 36352 ok
28440 36352 36864 ok
28441 36864 37376 ok
28442 37376 37888 ok
28443 37888 38400 ok
28444 38400 38912 ok
28445 38912 39424 ok
28446 39424 39936 ok
28447 39936 40448 ok
28448 40448 40960 ok
28449 40960 41472 ok
28450 41472 41984 ok
28451 41984 42496 ok
28452 42496 43008 ok
28453 43008 43520 ok
28454 43520 44032 ok
28455 44032 44544 ok
28456 44544 45056 ok
28457 45056 45568 ok
28458 45568 46080 ok
28459 46080 46592 ok
28460 46592 47104 ok
28461 47104 47616 ok
28462 47616 48128 ok
28463 48128 48640 ok
28464 48640 49152 ok
28465 49152 49664 ok
28466 49664 50176 ok
28467 50176 50688 ok
28468 50688 51200 ok
28469 51200 51712 ok
28470 51712 52224 ok
28471 52224 52736 ok
28472 52736 53248 ok
28473 53248 53760 ok
28474 53760 54272 ok
28475 54272 54784 ok
28476 54784 55296 ok
28477 55296 55808 ok
28478 55808 56320 ok
28479 56320 56832 ok
28480 56832 57344 ok
28481 57344 57856 ok
28482 57856 58368 ok
28483 58368 58880 ok
28484 58880 59392 ok
28485 59392 59904 ok
28486 59904 60416 ok
28487 60416 60928 ok
28488 Sep 22 23:21:47.587 DEBG IO Flush 1001 has deps [JobId(1000)]
28489 Sep 22 23:21:47.587 INFO Checking if live repair is needed
28490 Sep 22 23:21:47.587 INFO No Live Repair required at this time
28491 60928 61440 ok
28492 61440 61952 ok
28493 61952 62464 ok
28494 62464 62976 ok
28495 62976 63488 ok
28496 63488 64000 ok
28497 64000 64512 ok
28498 64512 65024 ok
28499 65024 65536 ok
28500 65536 66048 ok
28501 66048 66560 ok
28502 66560 67072 ok
28503 67072 67584 ok
28504 67584 68096 ok
28505 68096 68608 ok
28506 68608 69120 ok
28507 69120 69632 ok
28508 69632 70144 ok
28509 70144 70656 ok
28510 70656 71168 ok
28511 71168 71680 ok
28512 71680 72192 ok
28513 72192 72704 ok
28514 72704 73216 ok
28515 73216 73728 ok
28516 73728 74240 ok
28517 74240 74752 ok
28518 74752 75264 ok
28519 75264 75776 ok
28520 75776 76288 ok
28521 76288 76800 ok
28522 76800 77312 ok
28523 77312 77824 ok
28524 77824 78336 ok
28525 78336 78848 ok
28526 78848 79360 ok
28527 79360 79872 ok
28528 79872 80384 ok
28529 80384 80896 ok
28530 80896 81408 ok
28531 81408 81920 ok
28532 81920 82432 ok
28533 82432 82944 ok
28534 82944 83456 ok
28535 83456 83968 ok
28536 83968 84480 ok
28537 84480 84992 ok
28538 84992 85504 ok
28539 85504 86016 ok
28540 86016 86528 ok
28541 86528 87040 ok
28542 87040 87552 ok
28543 87552 88064 ok
28544 88064 88576 ok
28545 88576 89088 ok
28546 89088 89600 ok
28547 89600 90112 ok
28548 90112 90624 ok
28549 90624 91136 ok
28550 91136 91648 ok
28551 91648 92160 ok
28552 92160 92672 ok
28553 92672 93184 ok
28554 93184 93696 ok
28555 93696 94208 ok
28556 94208 94720 ok
28557 94720 95232 ok
28558 95232 95744 ok
28559 95744 96256 ok
28560 96256 96768 ok
28561 96768 97280 ok
28562 97280 97792 ok
28563 97792 98304 ok
28564 98304 98816 ok
28565 98816 99328 ok
28566 99328 99840 ok
28567 99840 100352 ok
28568 100352 100864 ok
28569 100864 101376 ok
28570 101376 101888 ok
28571 101888 102400 ok
28572 102400 102912 ok
28573 102912 103424 ok
28574 103424 103936 ok
28575 103936 104448 ok
28576 104448 104960 ok
28577 104960 105472 ok
28578 105472 105984 ok
28579 105984 106496 ok
28580 106496 107008 ok
28581 107008 107520 ok
28582 107520 108032 ok
28583 108032 108544 ok
28584 108544 109056 ok
28585 109056 109568 ok
28586 109568 110080 ok
28587 110080 110592 ok
28588 110592 111104 ok
28589 111104 111616 ok
28590 111616 112128 ok
28591 112128 112640 ok
28592 112640 113152 ok
28593 113152 113664 ok
28594 113664 114176 ok
28595 114176 114688 ok
28596 114688 115200 ok
28597 115200 115712 ok
28598 115712 116224 ok
28599 116224 116736 ok
28600 116736 117248 ok
28601 117248 117760 ok
28602 117760 118272 ok
28603 118272 118784 ok
28604 118784 119296 ok
28605 119296 119808 ok
28606 119808 120320 ok
28607 120320 120832 ok
28608 120832 121344 ok
28609 121344 121856 ok
28610 121856 122368 ok
28611 122368 122880 ok
28612 122880 123392 ok
28613 123392 123904 ok
28614 123904 124416 ok
28615 124416 124928 ok
28616 124928 125440 ok
28617 125440 125952 ok
28618 125952 126464 ok
286192023-09-22T23:21:47.588ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected looper = 0
286202023-09-22T23:21:47.588ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:55664 in state Disconnected
286212023-09-22T23:21:47.588ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected looper = 1
286222023-09-22T23:21:47.588ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:37034 in state Disconnected
286232023-09-22T23:21:47.588ZINFOcrucible-pantry (datafile): [2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 looper connected looper = 2
286242023-09-22T23:21:47.588ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:53040 in state Disconnected
28625 Sep 22 23:21:47.588 INFO accepted connection from 127.0.0.1:42145, task: main
28626 Sep 22 23:21:47.588 INFO accepted connection from 127.0.0.1:36746, task: main
28627 Sep 22 23:21:47.588 INFO accepted connection from 127.0.0.1:48902, task: main
28628 Sep 22 23:21:47.589 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:5 g:2
28629 Sep 22 23:21:47.589 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:5 g:2
28630 Sep 22 23:21:47.589 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:5 g:2
28631 126464 126976 ok
28632 126976 127488 ok
28633 127488 128000 ok
28634 128000 128512 ok
28635 128512 129024 ok
28636 129024 129536 ok
28637 129536 130048 ok
28638 130048 130560 ok
28639 130560 131072 ok
28640 131072 131584 ok
28641 131584 132096 ok
28642 132096 132608 ok
28643 132608 133120 ok
28644 133120 133632 ok
28645 133632 134144 ok
28646 134144 134656 ok
28647 134656 135168 ok
28648 135168 135680 ok
28649 135680 136192 ok
28650 136192 136704 ok
28651 136704 137216 ok
28652 137216 137728 ok
28653 137728 138240 ok
28654 138240 138752 ok
28655 138752 139264 ok
28656 139264 139776 ok
28657 139776 140288 ok
28658 140288 140800 ok
28659 140800 141312 ok
28660 141312 141824 ok
28661 141824 142336 ok
28662 142336 142848 ok
28663 142848 143360 ok
28664 143360 143872 ok
28665 143872 144384 ok
28666 144384 144896 ok
28667 144896 145408 ok
28668 145408 145920 ok
28669 145920 146432 ok
28670 146432 146944 ok
28671 146944 147456 ok
28672 147456 147968 ok
28673 147968 148480 ok
28674 148480 148992 ok
28675 148992 149504 ok
28676 149504 150016 ok
28677 150016 150528 ok
28678 150528 151040 ok
28679 151040 151552 ok
28680 151552 152064 ok
28681 152064 152576 ok
28682 152576 153088 ok
28683 153088 153600 ok
28684 153600 154112 ok
28685 154112 154624 ok
28686 154624 155136 ok
28687 155136 155648 ok
28688 155648 156160 ok
28689 156160 156672 ok
28690 156672 157184 ok
28691 157184 157696 ok
28692 157696 158208 ok
28693 158208 158720 ok
28694 158720 159232 ok
28695 159232 159744 ok
28696 159744 160256 ok
28697 160256 160768 ok
28698 160768 161280 ok
28699 161280 161792 ok
28700 161792 162304 ok
28701 162304 162816 ok
28702 162816 163328 ok
28703 163328 163840 ok
28704 163840 164352 ok
28705 164352 164864 ok
28706 164864 165376 ok
28707 165376 165888 ok
28708 165888 166400 ok
28709 166400 166912 ok
28710 166912 167424 ok
28711 167424 167936 ok
28712 167936 168448 ok
28713 168448 168960 ok
28714 168960 169472 ok
28715 169472 169984 ok
28716 169984 170496 ok
28717 170496 171008 ok
28718 171008 171520 ok
28719 171520 172032 ok
28720 172032 172544 ok
28721 172544 173056 ok
28722 173056 173568 ok
28723 173568 174080 ok
28724 174080 174592 ok
28725 174592 175104 ok
28726 175104 175616 ok
28727 175616 176128 ok
28728 176128 176640 ok
28729 176640 177152 ok
28730 177152 177664 ok
28731 177664 178176 ok
28732 178176 178688 ok
28733 178688 179200 ok
28734 179200 179712 ok
28735 179712 180224 ok
28736 180224 180736 ok
28737 180736 181248 ok
28738 181248 181760 ok
28739 181760 182272 ok
28740 182272 182784 ok
28741 182784 183296 ok
28742 183296 183808 ok
28743 183808 184320 ok
28744 184320 184832 ok
28745 184832 185344 ok
28746 185344 185856 ok
28747 185856 186368 ok
28748 186368 186880 ok
28749 186880 187392 ok
28750 187392 187904 ok
28751 187904 188416 ok
28752 188416 188928 ok
28753 188928 189440 ok
28754 189440 189952 ok
28755 189952 190464 ok
28756 190464 190976 ok
28757 190976 191488 ok
28758 191488 192000 ok
28759 Sep 22 23:21:47.590 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
28760 Sep 22 23:21:47.590 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } connected, version 4, task: proc
28761 Sep 22 23:21:47.590 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
28762 Sep 22 23:21:47.590 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } connected, version 4, task: proc
28763 Sep 22 23:21:47.590 INFO Connection request from 40e06af7-e69f-43e2-974b-bb00dc5e8960 with version 4, task: proc
28764 Sep 22 23:21:47.590 INFO upstairs UpstairsConnection { upstairs_id: 40e06af7-e69f-43e2-974b-bb00dc5e8960, session_id: d90ddc5f-b441-432b-bb73-5ab29b73ba5b, gen: 1 } connected, version 4, task: proc
28765 Sep 22 23:21:47.590 DEBG up_ds_listen was notified
28766 Sep 22 23:21:47.590 DEBG up_ds_listen process 1001
28767 Sep 22 23:21:47.590 DEBG [A] ack job 1001:2, : downstairs
28768 Sep 22 23:21:47.590 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
28769 Sep 22 23:21:47.590 DEBG up_ds_listen checked 1 jobs, back to waiting
28770 192000 192512 ok
28771 192512 193024 ok
28772 193024 193536 ok
28773 193536 194048 ok
28774 194048 194560 ok
28775 194560 195072 ok
28776 195072 195584 ok
28777 195584 196096 ok
28778 196096 196608 ok
28779 196608 197120 ok
28780 197120 197632 ok
28781 197632 198144 ok
28782 198144 198656 ok
28783 198656 199168 ok
28784 199168 199680 ok
28785 199680 200192 ok
28786 200192 200704 ok
28787 200704 201216 ok
28788 201216 201728 ok
28789 201728 202240 ok
28790 202240 202752 ok
28791 202752 203264 ok
28792 203264 203776 ok
28793 203776 204288 ok
28794 204288 204800 ok
28795 204800 205312 ok
28796 205312 205824 ok
28797 205824 206336 ok
28798 206336 206848 ok
28799 206848 207360 ok
28800 207360 207872 ok
28801 207872 208384 ok
28802 208384 208896 ok
28803 208896 209408 ok
28804 209408 209920 ok
28805 209920 210432 ok
28806 210432 210944 ok
28807 210944 211456 ok
28808 211456 211968 ok
28809 211968 212480 ok
28810 212480 212992 ok
28811 212992 213504 ok
28812 213504 214016 ok
28813 214016 214528 ok
28814 214528 215040 ok
28815 215040 215552 ok
28816 215552 216064 ok
28817 216064 216576 ok
28818 216576 217088 ok
28819 217088 217600 ok
28820 217600 218112 ok
28821 218112 218624 ok
28822 218624 219136 ok
28823 219136 219648 ok
28824 219648 220160 ok
28825 220160 220672 ok
28826 220672 221184 ok
28827 221184 221696 ok
28828 221696 222208 ok
28829 222208 222720 ok
28830 222720 223232 ok
28831 223232 223744 ok
28832 223744 224256 ok
28833 224256 224768 ok
28834 224768 225280 ok
28835 225280 225792 ok
28836 225792 226304 ok
28837 226304 226816 ok
28838 226816 227328 ok
28839 227328 227840 ok
28840 227840 228352 ok
28841 228352 228864 ok
28842 228864 229376 ok
28843 229376 229888 ok
28844 229888 230400 ok
28845 230400 230912 ok
28846 230912 231424 ok
28847 231424 231936 ok
28848 231936 232448 ok
28849 232448 232960 ok
28850 232960 233472 ok
28851 233472 233984 ok
28852 233984 234496 ok
28853 234496 235008 ok
28854 235008 235520 ok
28855 235520 236032 ok
28856 236032 236544 ok
28857 236544 237056 ok
28858 237056 237568 ok
28859 237568 238080 ok
28860 238080 238592 ok
28861 238592 239104 ok
28862 239104 239616 ok
28863 239616 240128 ok
28864 240128 240640 ok
28865 240640 241152 ok
28866 241152 241664 ok
28867 241664 242176 ok
28868 242176 242688 ok
28869 242688 243200 ok
28870 243200 243712 ok
28871 243712 244224 ok
28872 244224 244736 ok
28873 244736 245248 ok
28874 245248 245760 ok
28875 245760 246272 ok
28876 246272 246784 ok
28877 246784 247296 ok
28878 247296 247808 ok
28879 247808 248320 ok
28880 248320 248832 ok
28881 248832 249344 ok
28882 249344 249856 ok
28883 249856 250368 ok
28884 250368 250880 ok
28885 250880 251392 ok
28886 251392 251904 ok
28887 251904 252416 ok
28888 252416 252928 ok
28889 252928 253440 ok
28890 253440 253952 ok
28891 253952 254464 ok
28892 254464 254976 ok
28893 254976 255488 ok
28894 255488 256000 ok
28895 256000 256512 ok
28896 256512 257024 ok
28897 257024 257536 ok
288982023-09-22T23:21:47.591ZINFOcrucible-pantry (datafile): [0] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) Disconnected Disconnected Disconnected ds_transition to WaitActive
288992023-09-22T23:21:47.591ZINFOcrucible-pantry (datafile): [0] Transition from Disconnected to WaitActive
289002023-09-22T23:21:47.591ZINFOcrucible-pantry (datafile): [1] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitActive Disconnected Disconnected ds_transition to WaitActive
289012023-09-22T23:21:47.591ZINFOcrucible-pantry (datafile): [1] Transition from Disconnected to WaitActive
28902 257536 258048 ok
28903 258048 258560 ok
28904 258560 259072 ok
28905 259072{"msg":"[2] 40e06af7-e69f-43e2-974b-bb00dc5e8960 (d90ddc5f-b441-432b-bb73-5ab29b73ba5b) WaitActive WaitActive Disconnected ds_transition to WaitActive","v":0,"name":"crucible-pantry","level":30,"time":"2023-09-22T23:21:47.591358439Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"component":"datafile"}
28906 259584 ok
28907 259584 {"msg":"[2] Transition from Disconnected to WaitActive","v":0,"name":"crucible-pantry","level":30,"time":"2023-09-22T23:21:47.591418248Z","hostname":"ip-10-150-1-55.us-west-2.compute.internal","pid":4769,"component":"datafile"}
28908 260096 ok
28909 260096 260608 ok
28910 260608 261120 ok
28911 261120 261632 ok
28912 261632 262144 ok
28913 262144 262656 ok
28914 262656 263168 ok
28915 263168 263680 ok
28916 263680 264192 ok
28917 264192 264704 ok
28918 264704 265216 ok
28919 265216 265728 ok
28920 265728 266240 ok
28921 266240 266752 ok
28922 266752 267264 ok
28923 267264 267776 ok
28924 267776 268288 ok
28925 268288 268800 ok
28926 268800 269312 ok
28927 269312 269824 ok
28928 269824 270336 ok
28929 270336 270848 ok
28930 270848 271360 ok
28931 271360 271872 ok
28932 271872 272384 ok
28933 272384 272896 ok
28934 272896 273408 ok
28935 273408 273920 ok
28936 273920 274432 ok
28937 274432 274944 ok
28938 274944 275456 ok
28939 275456 275968 ok
28940 275968 276480 ok
28941 276480 276992 ok
28942 276992 277504 ok
28943 277504 278016 ok
28944 278016 278528 ok
28945 278528 279040 ok
28946 279040 279552 ok
28947 279552 280064 ok
28948 280064 280576 ok
28949 280576 281088 ok
28950 281088 281600 ok
28951 281600 282112 ok
28952 282112 282624 ok
28953 282624 283136 ok
28954 283136 283648 ok
28955 283648 284160 ok
28956 284160 284672 ok
28957 284672 285184 ok
28958 285184 285696 ok
28959 285696 286208 ok
28960 286208 286720 ok
28961 286720 287232 ok
28962 287232 287744 ok
28963 287744 288256 ok
28964 288256 288768 ok
28965 288768 289280 ok
28966 289280 289792 ok
28967 289792 290304 ok
28968 290304 290816 ok
28969 290816 291328 ok
28970 291328 291840 ok
28971 291840 292352 ok
28972 292352 292864 ok
28973 292864 293376 ok
28974 293376 293888 ok
28975 293888 294400 ok
28976 294400 294912 ok
28977 294912 295424 ok
28978 295424 295936 ok
28979 295936 296448 ok
28980 296448 296960 ok
28981 296960 297472 ok
28982 297472 297984 ok
28983 297984 298496 ok
28984 298496 299008 ok
28985 299008 299520 ok
28986 299520 300032 ok
28987 300032 300544 ok
28988 300544 301056 ok
28989 301056 301568 ok
28990 301568 302080 ok
28991 302080 302592 ok
28992 302592 303104 ok
28993 303104 303616 ok
28994 303616 304128 ok
28995 304128 304640 ok
28996 304640 305152 ok
28997 305152 305664 ok
28998 305664 306176 ok
28999 306176 306688 ok
29000 306688 307200 ok
29001 307200 307712 ok
29002 307712 308224 ok
29003 308224 308736 ok
29004 308736 309248 ok
29005 309248 309760 ok
29006 309760 310272 ok
29007 310272 310784 ok
29008 310784 311296 ok
29009 311296 311808 ok
29010 311808 312320 ok
29011 312320 312832 ok
29012 312832 313344 ok
29013 313344 313856 ok
29014 313856 314368 ok
29015 314368 314880 ok
29016 314880 315392 ok
29017 315392 315904 ok
29018 315904 316416 ok
29019 316416 316928 ok
29020 316928 317440 ok
29021 317440 317952 ok
29022 317952 318464 ok
29023 318464 318976 ok
29024 318976 319488 ok
29025 319488 320000 ok
29026 320000 320512 ok
29027 320512 321024 ok
29028 321024 321536 ok
29029 321536 322048 ok
29030 322048 322560 ok
29031 322560 323072 ok
29032 323072 323584 ok
29033 323584 324096 ok
29034 324096 324608 ok
29035 324608 325120 ok
29036 325120 325632 ok
29037 325632 326144 ok
29038 326144 326656 ok
29039 326656 327168 ok
29040 327168 327680 ok
29041 327680 328192 ok
29042 328192 328704 ok
29043 328704 329216 ok
29044 329216 329728 ok
29045 329728 330240 ok
29046 330240 330752 ok
29047 330752 331264 ok
29048 331264 331776 ok
29049 331776 332288 ok
29050 332288 332800 ok
29051 332800 333312 ok
29052 333312 333824 ok
29053 333824 334336 ok
29054 334336 334848 ok
29055 334848 335360 ok
29056 335360 335872 ok
29057 335872 336384 ok
29058 336384 336896 ok
29059 336896 337408 ok
29060 337408 337920 ok
29061 337920 338432 ok
29062 338432 338944 ok
29063 338944 339456 ok
29064 339456 339968 ok
29065 339968 340480 ok
29066 340480 340992 ok
29067 340992 341504 ok
29068 341504 342016 ok
29069 342016 342528 ok
29070 342528 343040 ok
29071 343040 343552 ok
29072 343552 344064 ok
29073 344064 344576 ok
29074 344576 345088 ok
29075 345088 345600 ok
29076 345600 346112 ok
29077 346112 346624 ok
29078 346624 347136 ok
29079 347136 347648 ok
29080 347648 348160 ok
29081 348160 348672 ok
29082 348672 349184 ok
29083 349184 349696 ok
29084 349696 350208 ok
29085 350208 350720 ok
29086 350720 351232 ok
29087 351232 351744 ok
29088 351744 352256 ok
29089 352256 352768 ok
29090 352768 353280 ok
29091 353280 353792 ok
29092 353792 354304 ok
29093 354304 354816 ok
29094 354816 355328 ok
29095 355328 355840 ok
29096 355840 356352 ok
29097 356352 356864 ok
29098 356864 357376 ok
29099 357376 357888 ok
29100 357888 358400 ok
29101 358400 358912 ok
29102 358912 359424 ok
29103 359424 359936 ok
29104 359936 360448 ok
29105 360448 360960 ok
29106 360960 361472 ok
29107 361472 361984 ok
29108 361984 362496 ok
29109 362496 363008 ok
29110 363008 363520 ok
29111 363520 364032 ok
29112 364032 364544 ok
29113 364544 365056 ok
29114 365056 365568 ok
29115 365568 366080 ok
29116 366080 366592 ok
29117 366592 367104 ok
29118 367104 367616 ok
29119 367616 368128 ok
29120 368128 368640 ok
29121 368640 369152 ok
29122 369152 369664 ok
29123 369664 370176 ok
29124 370176 370688 ok
29125 370688 371200 ok
29126 371200 371712 ok
29127 371712 372224 ok
29128 372224 372736 ok
29129 372736 373248 ok
29130 373248 373760 ok
29131 373760 374272 ok
29132 374272 374784 ok
29133 374784 375296 ok
29134 375296 375808 ok
29135 375808 376320 ok
29136 376320 376832 ok
29137 376832 377344 ok
29138 377344 377856 ok
29139 377856 378368 ok
29140 378368 378880 ok
29141 378880 379392 ok
29142 379392 379904 ok
29143 379904 380416 ok
29144 380416 380928 ok
29145 380928 381440 ok
29146 381440 381952 ok
29147 381952 382464 ok
29148 382464 382976 ok
29149 382976 383488 ok
29150 383488 384000 ok
29151 384000 384512 ok
29152 384512 385024 ok
29153 385024 385536 ok
29154 385536 386048 ok
29155 386048 386560 ok
29156 386560 387072 ok
29157 387072 387584 ok
29158 387584 388096 ok
29159 388096 388608 ok
29160 388608 389120 ok
29161 389120 389632 ok
29162 389632 390144 ok
29163 390144 390656 ok
29164 390656 391168 ok
29165 391168 391680 ok
29166 391680 392192 ok
29167 392192 392704 ok
29168 392704 393216 ok
29169 393216 393728 ok
29170 393728 394240 ok
29171 394240 394752 ok
29172 394752 395264 ok
29173 395264 395776 ok
29174 395776 396288 ok
29175 396288 396800 ok
29176 396800 397312 ok
29177 397312 397824 ok
29178 397824 398336 ok
29179 398336 398848 ok
29180 398848 399360 ok
29181 399360 399872 ok
29182 399872 400384 ok
29183 400384 400896 ok
29184 400896 401408 ok
29185 401408 401920 ok
29186 401920 402432 ok
29187 402432 402944 ok
29188 402944 403456 ok
29189 403456 403968 ok
29190 403968 404480 ok
29191 404480 404992 ok
29192 404992 405504 ok
29193 405504 406016 ok
29194 406016 406528 ok
29195 406528 407040 ok
29196 407040 407552 ok
29197 407552 408064 ok
29198 408064 408576 ok
29199 408576 409088 ok
29200 409088 409600 ok
29201 409600 410112 ok
29202 410112 410624 ok
29203 410624 411136 ok
29204 411136 411648 ok
29205 411648 412160 ok
29206 412160 412672 ok
29207 412672 413184 ok
29208 413184 413696 ok
29209 413696 414208 ok
29210 414208 414720 ok
29211 414720 415232 ok
29212 415232 415744 ok
29213 415744 416256 ok
29214 416256 416768 ok
29215 416768 417280 ok
29216 417280 417792 ok
29217 417792 418304 ok
29218 418304 418816 ok
29219 418816 419328 ok
29220 419328 419840 ok
29221 419840 420352 ok
29222 420352 420864 ok
29223 420864 421376 ok
29224 421376 421888 ok
29225 421888 422400 ok
29226 422400 422912 ok
29227 422912 423424 ok
29228 423424 423936 ok
29229 423936 424448 ok
29230 424448 424960 ok
29231 424960 425472 ok
29232 425472 425984 ok
29233 425984 426496 ok
29234 426496 427008 ok
29235 427008 427520 ok
29236 427520 428032 ok
29237 428032 428544 ok
29238 428544 429056 ok
29239 429056 429568 ok
29240 429568 430080 ok
29241 430080 430592 ok
29242 430592 431104 ok
29243 431104 431616 ok
29244 431616 432128 ok
29245 432128 432640 ok
29246 432640 433152 ok
29247 433152 433664 ok
29248 433664 434176 ok
29249 434176 434688 ok
29250 434688 435200 ok
29251 435200 435712 ok
29252 435712 436224 ok
29253 436224 436736 ok
29254 436736 437248 ok
29255 437248 437760 ok
29256 437760 438272 ok
29257 438272 438784 ok
29258 438784 439296 ok
29259 439296 439808 ok
29260 439808 440320 ok
29261 440320 440832 ok
29262 440832 441344 ok
29263 441344 441856 ok
29264 441856 442368 ok
29265 442368 442880 ok
29266 442880 443392 ok
29267 443392 443904 ok
29268 443904 444416 ok
29269 444416 444928 ok
29270 444928 445440 ok
29271 445440 445952 ok
29272 445952 446464 ok
29273 446464 446976 ok
29274 446976 447488 ok
29275 447488 448000 ok
29276 448000 448512 ok
29277 448512 449024 ok
29278 449024 449536 ok
29279 449536 450048 ok
29280 450048 450560 ok
29281 450560 451072 ok
29282 451072 451584 ok
29283 451584 452096 ok
29284 452096 452608 ok
29285 452608 453120 ok
29286 453120 453632 ok
29287 453632 454144 ok
29288 454144 454656 ok
29289 454656 455168 ok
29290 455168 455680 ok
29291 455680 456192 ok
29292 456192 456704 ok
29293 456704 457216 ok
29294 457216 457728 ok
29295 457728 458240 ok
29296 458240 458752 ok
29297 458752 459264 ok
29298 459264 459776 ok
29299 459776 460288 ok
29300 460288 460800 ok
29301 460800 461312 ok
29302 461312 461824 ok
29303 461824 462336 ok
29304 462336 462848 ok
29305 462848 463360 ok
29306 463360 463872 ok
29307 463872 464384 ok
29308 464384 464896 ok
29309 464896 465408 ok
29310 465408 465920 ok
29311 465920 466432 ok
29312 466432 466944 ok
29313 466944 467456 ok
29314 467456 467968 ok
29315 467968 468480 ok
29316 468480 468992 ok
29317 468992 469504 ok
29318 469504 470016 ok
29319 470016 470528 ok
29320 470528 471040 ok
29321 471040 471552 ok
29322 471552 472064 ok
29323 472064 472576 ok
29324 472576 473088 ok
29325 473088 473600 ok
29326 473600 474112 ok
29327 474112 474624 ok
29328 474624 475136 ok
29329 475136 475648 ok
29330 475648 476160 ok
29331 476160 476672 ok
29332 476672 477184 ok
29333 477184 477696 ok
29334 477696 478208 ok
29335 478208 478720 ok
29336 478720 479232 ok
29337 479232 479744 ok
29338 479744 480256 ok
29339 480256 480768 ok
29340 480768 481280 ok
29341 481280 481792 ok
29342 481792 482304 ok
29343 482304 482816 ok
29344 482816 483328 ok
29345 483328 483840 ok
29346 483840 484352 ok
29347 484352 484864 ok
29348 484864 485376 ok
29349 485376 485888 ok
29350 485888 486400 ok
29351 486400 486912 ok
29352 486912 487424 ok
29353 487424 487936 ok
29354 487936 488448 ok
29355 488448 488960 ok
29356 488960 489472 ok
29357 489472 489984 ok
29358 489984 490496 ok
29359 490496 491008 ok
29360 491008 491520 ok
29361 491520 492032 ok
29362 492032 492544 ok
29363 492544 493056 ok
29364 493056 493568 ok
29365 493568 494080 ok
29366 494080 494592 ok
29367 494592 495104 ok
29368 495104 495616 ok
29369 495616 496128 ok
29370 496128 496640 ok
29371 496640 497152 ok
29372 497152 497664 ok
29373 497664 498176 ok
29374 498176 498688 ok
29375 498688 499200 ok
29376 499200 499712 ok
29377 499712 500224 ok
29378 500224 500736 ok
29379 500736 501248 ok
29380 501248 501760 ok
29381 501760 502272 ok
29382 502272 502784 ok
29383 502784 503296 ok
29384 503296 503808 ok
29385 503808 504320 ok
29386 504320 504832 ok
29387 504832 505344 ok
29388 505344 505856 ok
29389 505856 506368 ok
29390 506368 506880 ok
29391 506880 507392 ok
29392 507392 507904 ok
29393 507904 508416 ok
29394 508416 508928 ok
29395 508928 509440 ok
29396 509440 509952 ok
29397 509952 510464 ok
29398 510464 510976 ok
29399 510976 511488 ok
29400 511488 512000 ok
29401 512000 512512 ok
29402 512512 513024 ok
29403 513024 513536 ok
29404 513536 514048 ok
29405 514048 514560 ok
29406 514560 515072 ok
29407 515072 515584 ok
29408 515584 516096 ok
29409 516096 516608 ok
29410 516608 517120 ok
29411 517120 517632 ok
29412 517632 518144 ok
29413 518144 518656 ok
29414 518656 519168 ok
29415 519168 519680 ok
29416 519680 520192 ok
29417 520192 520704 ok
29418 520704 521216 ok
29419 521216 521728 ok
29420 521728 522240 ok
29421 522240 522752 ok
29422 522752 523264 ok
29423 523264 523776 ok
29424 523776 524288 ok
29425 524288 524800 ok
29426 524800 525312 ok
29427 525312 525824 ok
29428 525824 526336 ok
29429 526336 526848 ok
29430 526848 527360 ok
29431 527360 527872 ok
29432 527872 528384 ok
29433 528384 528896 ok
29434 528896 529408 ok
29435 529408 529920 ok
29436 529920 530432 ok
29437 530432 530944 ok
29438 530944 531456 ok
29439 531456 531968 ok
29440 531968 532480 ok
29441 532480 532992 ok
29442 532992 533504 ok
29443 533504 534016 ok
29444 534016 534528 ok
29445 534528 535040 ok
29446 535040 535552 ok
29447 535552 536064 ok
29448 536064 536576 ok
29449 536576 537088 ok
29450 537088 537600 ok
29451 537600 538112 ok
29452 538112 538624 ok
29453 538624 539136 ok
29454 539136 539648 ok
29455 539648 540160 ok
29456 540160 540672 ok
29457 540672 541184 ok
29458 541184 541696 ok
29459 541696 542208 ok
29460 542208 542720 ok
29461 542720 543232 ok
29462 543232 543744 ok
29463 543744 544256 ok
29464 544256 544768 ok
29465 544768 545280 ok
29466 545280 545792 ok
29467 545792 546304 ok
29468 546304 546816 ok
29469 546816 547328 ok
29470 547328 547840 ok
29471 547840 548352 ok
29472 548352 548864 ok
29473 548864 549376 ok
29474 549376 549888 ok
29475 549888 550400 ok
29476 550400 550912 ok
29477 550912 551424 ok
29478 551424 551936 ok
29479 551936 552448 ok
29480 552448 552960 ok
29481 552960 553472 ok
29482 553472 553984 ok
29483 553984 554496 ok
29484 554496 555008 ok
29485 555008 555520 ok
29486 555520 556032 ok
29487 556032 556544 ok
29488 556544 557056 ok
29489 557056 557568 ok
29490 557568 558080 ok
29491 558080 558592 ok
29492 558592 559104 ok
29493 559104 559616 ok
29494 559616 560128 ok
29495 560128 560640 ok
29496 560640 561152 ok
29497 561152 561664 ok
29498 561664 562176 ok
29499 562176 562688 ok
29500 562688 563200 ok
29501 563200 563712 ok
29502 563712 564224 ok
29503 564224 564736 ok
29504 564736 565248 ok
29505 565248 565760 ok
29506 565760 566272 ok
29507 566272 566784 ok
29508 566784 567296 ok
29509 567296 567808 ok
29510 567808 568320 ok
29511 568320 568832 ok
29512 568832 569344 ok
29513 569344 569856 ok
29514 569856 570368 ok
29515 570368 570880 ok
29516 570880 571392 ok
29517 571392 571904 ok
29518 571904 572416 ok
29519 572416 572928 ok
29520 572928 573440 ok
29521 573440 573952 ok
29522 573952 574464 ok
29523 574464 574976 ok
29524 574976 575488 ok
29525 575488 576000 ok
29526 576000 576512 ok
29527 576512 577024 ok
29528 577024 577536 ok
29529 577536 578048 ok
29530 578048 578560 ok
29531 578560 579072 ok
29532 579072 579584 ok
29533 579584 580096 ok
29534 580096 580608 ok
29535 580608 581120 ok
29536 581120 581632 ok
29537 581632 582144 ok
29538 582144 582656 ok
29539 582656 583168 ok
29540 583168 583680 ok
29541 583680 584192 ok
29542 584192 584704 ok
29543 584704 585216 ok
29544 585216 585728 ok
29545 585728 586240 ok
29546 586240 586752 ok
29547 586752 587264 ok
29548 587264 587776 ok
29549 587776 588288 ok
29550 588288 588800 ok
29551 588800 589312 ok
29552 589312 589824 ok
29553 589824 590336 ok
29554 590336 590848 ok
29555 590848 591360 ok
29556 591360 591872 ok
29557 591872 592384 ok
29558 592384 592896 ok
29559 592896 593408 ok
29560 593408 593920 ok
29561 593920 594432 ok
29562 594432 594944 ok
29563 594944 595456 ok
29564 595456 595968 ok
29565 595968 596480 ok
29566 596480 596992 ok
29567 596992 597504 ok
29568 597504 598016 ok
29569 598016 598528 ok
29570 598528 599040 ok
29571 599040 599552 ok
29572 599552 600064 ok
29573 600064 600576 ok
29574 600576 601088 ok
29575 601088 601600 ok
29576 601600 602112 ok
29577 602112 602624 ok
29578 602624 603136 ok
29579 603136 603648 ok
29580 603648 604160 ok
29581 604160 604672 ok
29582 604672 605184 ok
29583 605184 605696 ok
29584 605696 606208 ok
29585 606208 606720 ok
29586 606720 607232 ok
29587 607232 607744 ok
29588 607744 608256 ok
29589 608256 608768 ok
29590 608768 609280 ok
29591 609280 609792 ok
29592 609792 610304 ok
29593 610304 610816 ok
29594 610816 611328 ok
29595 611328 611840 ok
29596 611840 612352 ok
29597 612352 612864 ok
29598 612864 613376 ok
29599 613376 613888 ok
29600 613888 614400 ok
29601 614400 614912 ok
29602 614912 615424 ok
29603 615424 615936 ok
29604 615936 616448 ok
29605 616448 616960 ok
29606 616960 617472 ok
29607 617472 617984 ok
29608 617984 618496 ok
29609 618496 619008 ok
29610 619008 619520 ok
29611 619520 620032 ok
29612 620032 620544 ok
29613 620544 621056 ok
29614 621056 621568 ok
29615 621568 622080 ok
29616 622080 622592 ok
29617 622592 623104 ok
29618 623104 623616 ok
29619 623616 624128 ok
29620 624128 624640 ok
29621 624640 625152 ok
29622 625152 625664 ok
29623 625664 626176 ok
29624 626176 626688 ok
29625 626688 627200 ok
29626 627200 627712 ok
29627 627712 628224 ok
29628 628224 628736 ok
29629 628736 629248 ok
29630 629248 629760 ok
29631 629760 630272 ok
29632 630272 630784 ok
29633 630784 631296 ok
29634 631296 631808 ok
29635 631808 632320 ok
29636 632320 632832 ok
29637 632832 633344 ok
29638 633344 633856 ok
29639 633856 634368 ok
29640 634368 634880 ok
29641 634880 635392 ok
29642 635392 635904 ok
29643 635904 636416 ok
29644 636416 636928 ok
29645 636928 637440 ok
29646 637440 637952 ok
29647 637952 638464 ok
29648 638464 638976 ok
29649 638976 639488 ok
29650 639488 640000 ok
29651 640000 640512 ok
29652 640512 641024 ok
29653 641024 641536 ok
29654 641536 642048 ok
29655 642048 642560 ok
29656 642560 643072 ok
29657 643072 643584 ok
29658 643584 644096 ok
29659 644096 644608 ok
29660 644608 645120 ok
29661 645120 645632 ok
29662 645632 646144 ok
29663 646144 646656 ok
29664 646656 647168 ok
29665 647168 647680 ok
29666 647680 648192 ok
29667 648192 648704 ok
29668 648704 649216 ok
29669 649216 649728 ok
29670 649728 650240 ok
29671 650240 650752 ok
29672 650752 651264 ok
29673 651264 651776 ok
29674 651776 652288 ok
29675 652288 652800 ok
29676 652800 653312 ok
29677 653312 653824 ok
29678 653824 654336 ok
29679 654336 654848 ok
29680 654848 655360 ok
29681 655360 655872 ok
29682 655872 656384 ok
29683 656384 656896 ok
29684 656896 657408 ok
29685 657408 657920 ok
29686 657920 658432 ok
29687 658432 658944 ok
29688 658944 659456 ok
29689 659456 659968 ok
29690 659968 660480 ok
29691 660480 660992 ok
29692 660992 661504 ok
29693 661504 662016 ok
29694 662016 662528 ok
29695 662528 663040 ok
29696 663040 663552 ok
29697 663552 664064 ok
29698 664064 664576 ok
29699 664576 665088 ok
29700 665088 665600 ok
29701 665600 666112 ok
29702 666112 666624 ok
29703 666624 667136 ok
29704 667136 667648 ok
29705 667648 668160 ok
29706 668160 668672 ok
29707 668672 669184 ok
29708 669184 669696 ok
29709 669696 670208 ok
29710 670208 670720 ok
29711 670720 671232 ok
29712 671232 671744 ok
29713 671744 672256 ok
29714 672256 672768 ok
29715 672768 673280 ok
29716 673280 673792 ok
29717 673792 674304 ok
29718 674304 674816 ok
29719 674816 675328 ok
29720 675328 675840 ok
29721 675840 676352 ok
29722 676352 676864 ok
29723 676864 677376 ok
29724 677376 677888 ok
29725 677888 678400 ok
29726 678400 678912 ok
29727 678912 679424 ok
29728 679424 679936 ok
29729 679936 680448 ok
29730 680448 680960 ok
29731 680960 681472 ok
29732 681472 681984 ok
29733 681984 682496 ok
29734 682496 683008 ok
29735 683008 683520 ok
29736 683520 684032 ok
29737 684032 684544 ok
29738 684544 685056 ok
29739 685056 685568 ok
29740 685568 686080 ok
29741 686080 686592 ok
29742 686592 687104 ok
29743 687104 687616 ok
29744 687616 688128 ok
29745 688128 688640 ok
29746 688640 689152 ok
29747 689152 689664 ok
29748 689664 690176 ok
29749 690176 690688 ok
29750 690688 691200 ok
29751 691200 691712 ok
29752 691712 692224 ok
29753 692224 692736 ok
29754 692736 693248 ok
29755 693248 693760 ok
29756 693760 694272 ok
29757 694272 694784 ok
29758 694784 695296 ok
29759 695296 695808 ok
29760 695808 696320 ok
29761 696320 696832 ok
29762 696832 697344 ok
29763 697344 697856 ok
29764 697856 698368 ok
29765 698368 698880 ok
29766 698880 699392 ok
29767 699392 699904 ok
29768 699904 700416 ok
29769 700416 700928 ok
29770 700928 701440 ok
29771 701440 701952 ok
29772 701952 702464 ok
29773 702464 702976 ok
29774 702976 703488 ok
29775 703488 704000 ok
29776 704000 704512 ok
29777 704512 705024 ok
29778 705024 705536 ok
29779 705536 706048 ok
29780 706048 706560 ok
29781 706560 707072 ok
29782 707072 707584 ok
29783 707584 708096 ok
29784 708096 708608 ok
29785 708608 709120 ok
29786 709120 709632 ok
29787 709632 710144 ok
29788 710144 710656 ok
29789 710656 711168 ok
29790 711168 711680 ok
29791 711680 712192 ok
29792 712192 712704 ok
29793 712704 713216 ok
29794 713216 713728 ok
29795 713728 714240 ok
29796 714240 714752 ok
29797 714752 715264 ok
29798 715264 715776 ok
29799 715776 716288 ok
29800 716288 716800 ok
29801 716800 717312 ok
29802 717312 717824 ok
29803 717824 718336 ok
29804 718336 718848 ok
29805 718848 719360 ok
29806 719360 719872 ok
29807 719872 720384 ok
29808 720384 720896 ok
29809 720896 721408 ok
29810 721408 721920 ok
29811 721920 722432 ok
29812 722432 722944 ok
29813 722944 723456 ok
29814 723456 723968 ok
29815 723968 724480 ok
29816 724480 724992 ok
29817 724992 725504 ok
29818 725504 726016 ok
29819 726016 726528 ok
29820 726528 727040 ok
29821 727040 727552 ok
29822 727552 728064 ok
29823 728064 728576 ok
29824 728576 729088 ok
29825 729088 729600 ok
29826 729600 730112 ok
29827 730112 730624 ok
29828 730624 731136 ok
29829 731136 731648 ok
29830 731648 732160 ok
29831 732160 732672 ok
29832 732672 733184 ok
29833 733184 733696 ok
29834 733696 734208 ok
29835 734208 734720 ok
29836 734720 735232 ok
29837 735232 735744 ok
29838 735744 736256 ok
29839 736256 736768 ok
29840 736768 737280 ok
29841 737280 737792 ok
29842 737792 738304 ok
29843 738304 738816 ok
29844 738816 739328 ok
29845 739328 739840 ok
29846 739840 740352 ok
29847 740352 740864 ok
29848 740864 741376 ok
29849 741376 741888 ok
29850 741888 742400 ok
29851 742400 742912 ok
29852 742912 743424 ok
29853 743424 743936 ok
29854 743936 744448 ok
29855 744448 744960 ok
29856 744960 745472 ok
29857 745472 745984 ok
29858 745984 746496 ok
29859 746496 747008 ok
29860 747008 747520 ok
29861 747520 748032 ok
29862 748032 748544 ok
29863 748544 749056 ok
29864 749056 749568 ok
29865 749568 750080 ok
29866 750080 750592 ok
29867 750592 751104 ok
29868 751104 751616 ok
29869 751616 752128 ok
29870 752128 752640 ok
29871 752640 753152 ok
29872 753152 753664 ok
29873 753664 754176 ok
29874 754176 754688 ok
29875 754688 755200 ok
29876 755200 755712 ok
29877 755712 756224 ok
29878 756224 756736 ok
29879 756736 757248 ok
29880 757248 757760 ok
29881 757760 758272 ok
29882 758272 758784 ok
29883 758784 759296 ok
29884 759296 759808 ok
29885 759808 760320 ok
29886 760320 760832 ok
29887 760832 761344 ok
29888 761344 761856 ok
29889 761856 762368 ok
29890 762368 762880 ok
29891 762880 763392 ok
29892 763392 763904 ok
29893 763904 764416 ok
29894 764416 764928 ok
29895 764928 765440 ok
29896 765440 765952 ok
29897 765952 766464 ok
29898 766464 766976 ok
29899 766976 767488 ok
29900 767488 768000 ok
29901 768000 768512 ok
29902 768512 769024 ok
29903 769024 769536 ok
29904 769536 770048 ok
29905 770048 770560 ok
29906 770560 771072 ok
29907 771072 771584 ok
29908 771584 772096 ok
29909 772096 772608 ok
29910 772608 773120 ok
29911 773120 773632 ok
29912 773632 774144 ok
29913 774144 774656 ok
29914 774656 775168 ok
29915 775168 775680 ok
29916 775680 776192 ok
29917 776192 776704 ok
29918 776704 777216 ok
29919 777216 777728 ok
29920 777728 778240 ok
29921 778240 778752 ok
29922 778752 779264 ok
29923 779264 779776 ok
29924 779776 780288 ok
29925 780288 780800 ok
29926 780800 781312 ok
29927 781312 781824 ok
29928 781824 782336 ok
29929 782336 782848 ok
29930 782848 783360 ok
29931 783360 783872 ok
29932 783872 784384 ok
29933 784384 784896 ok
29934 784896 785408 ok
29935 785408 785920 ok
29936 785920 786432 ok
29937 786432 786944 ok
29938 786944 787456 ok
29939 787456 787968 ok
29940 787968 788480 ok
29941 788480 788992 ok
29942 788992 789504 ok
29943 789504 790016 ok
29944 790016 790528 ok
29945 790528 791040 ok
29946 791040 791552 ok
29947 791552 792064 ok
29948 792064 792576 ok
29949 792576 793088 ok
29950 793088 793600 ok
29951 793600 794112 ok
29952 794112 794624 ok
29953 794624 795136 ok
29954 795136 795648 ok
29955 795648 796160 ok
29956 796160 796672 ok
29957 796672 797184 ok
29958 797184 797696 ok
29959 797696 798208 ok
29960 798208 798720 ok
29961 798720 799232 ok
29962 799232 799744 ok
29963 799744 800256 ok
29964 800256 800768 ok
29965 800768 801280 ok
29966 801280 801792 ok
29967 801792 802304 ok
29968 802304 802816 ok
29969 802816 803328 ok
29970 803328 803840 ok
29971 803840 804352 ok
29972 804352 804864 ok
29973 804864 805376 ok
29974 805376 805888 ok
29975 805888 806400 ok
29976 806400 806912 ok
29977 806912 807424 ok
29978 807424 807936 ok
29979 807936 808448 ok
29980 808448 808960 ok
29981 808960 809472 ok
29982 809472 809984 ok
29983 809984 810496 ok
29984 810496 811008 ok
29985 811008 811520 ok
29986 811520 812032 ok
29987 812032 812544 ok
29988 812544 813056 ok
29989 813056 813568 ok
29990 813568 814080 ok
29991 814080 814592 ok
29992 814592 815104 ok
29993 815104 815616 ok
29994 815616 816128 ok
29995 816128 816640 ok
29996 816640 817152 ok
29997 817152 817664 ok
29998 817664 818176 ok
29999 818176 818688 ok
30000 818688 819200 ok
30001 819200 819712 ok
30002 819712 820224 ok
30003 820224 820736 ok
30004 820736 821248 ok
30005 821248 821760 ok
30006 821760 822272 ok
30007 822272 822784 ok
30008 822784 823296 ok
30009 823296 823808 ok
30010 823808 824320 ok
30011 824320 824832 ok
30012 824832 825344 ok
30013 825344 825856 ok
30014 825856 826368 ok
30015 826368 826880 ok
30016 826880 827392 ok
30017 827392 827904 ok
30018 827904 828416 ok
30019 828416 828928 ok
30020 828928 829440 ok
30021 829440 829952 ok
30022 829952 830464 ok
30023 830464 830976 ok
30024 830976 831488 ok
30025 831488 832000 ok
30026 832000 832512 ok
30027 832512 833024 ok
30028 833024 833536 ok
30029 833536 834048 ok
30030 834048 834560 ok
30031 834560 835072 ok
30032 835072 835584 ok
30033 835584 836096 ok
30034 836096 836608 ok
30035 836608 837120 ok
30036 837120 837632 ok
30037 837632 838144 ok
30038 838144 838656 ok
30039 838656 839168 ok
30040 839168 839680 ok
30041 839680 840192 ok
30042 840192 840704 ok
30043 840704 841216 ok
30044 841216 841728 ok
30045 841728 842240 ok
30046 842240 842752 ok
30047 842752 843264 ok
30048 843264 843776 ok
30049 843776 844288 ok
30050 844288 844800 ok
30051 844800 845312 ok
30052 845312 845824 ok
30053 845824 846336 ok
30054 846336 846848 ok
30055 846848 847360 ok
30056 847360 847872 ok
30057 847872 848384 ok
30058 848384 848896 ok
30059 848896 849408 ok
30060 849408 849920 ok
30061 849920 850432 ok
30062 850432 850944 ok
30063 850944 851456 ok
30064 851456 851968 ok
30065 851968 852480 ok
30066 852480 852992 ok
30067 852992 853504 ok
30068 853504 854016 ok
30069 854016 854528 ok
30070 854528 855040 ok
30071 855040 855552 ok
30072 855552 856064 ok
30073 856064 856576 ok
30074 856576 857088 ok
30075 857088 857600 ok
30076 857600 858112 ok
30077 858112 858624 ok
30078 858624 859136 ok
30079 859136 859648 ok
30080 859648 860160 ok
30081 860160 860672 ok
30082 860672 861184 ok
30083 861184 861696 ok
30084 861696 862208 ok
30085 862208 862720 ok
30086 862720 863232 ok
30087 863232 863744 ok
30088 863744 864256 ok
30089 864256 864768 ok
30090 864768 865280 ok
30091 865280 865792 ok
30092 865792 866304 ok
30093 866304 866816 ok
30094 866816 867328 ok
30095 867328 867840 ok
30096 867840 868352 ok
30097 868352 868864 ok
30098 868864 869376 ok
30099 869376 869888 ok
30100 869888 870400 ok
30101 870400 870912 ok
30102 870912 871424 ok
30103 871424 871936 ok
30104 871936 872448 ok
30105 872448 872960 ok
30106 872960 873472 ok
30107 873472 873984 ok
30108 873984 874496 ok
30109 874496 875008 ok
30110 875008 875520 ok
30111 875520 876032 ok
30112 876032 876544 ok
30113 876544 877056 ok
30114 877056 877568 ok
30115 877568 878080 ok
30116 878080 878592 ok
30117 878592 879104 ok
30118 879104 879616 ok
30119 879616 880128 ok
30120 880128 880640 ok
30121 880640 881152 ok
30122 881152 881664 ok
30123 881664 882176 ok
30124 882176 882688 ok
30125 882688 883200 ok
30126 883200 883712 ok
30127 883712 884224 ok
30128 884224 884736 ok
30129 884736 885248 ok
30130 885248 885760 ok
30131 885760 886272 ok
30132 886272 886784 ok
30133 886784 887296 ok
30134 887296 887808 ok
30135 887808 888320 ok
30136 888320 888832 ok
30137 888832 889344 ok
30138 889344 889856 ok
30139 889856 890368 ok
30140 890368 890880 ok
30141 890880 891392 ok
30142 891392 891904 ok
30143 891904 892416 ok
30144 892416 892928 ok
30145 892928 893440 ok
30146 893440 893952 ok
30147 893952 894464 ok
30148 894464 894976 ok
30149 894976 895488 ok
30150 895488 896000 ok
30151 896000 896512 ok
30152 896512 897024 ok
30153 897024 897536 ok
30154 897536 898048 ok
30155 898048 898560 ok
30156 898560 899072 ok
30157 899072 899584 ok
30158 899584 900096 ok
30159 900096 900608 ok
30160 900608 901120 ok
30161 901120 901632 ok
30162 901632 902144 ok
30163 902144 902656 ok
30164 902656 903168 ok
30165 903168 903680 ok
30166 903680 904192 ok
30167 904192 904704 ok
30168 904704 905216 ok
30169 905216 905728 ok
30170 905728 906240 ok
30171 906240 906752 ok
30172 906752 907264 ok
30173 907264 907776 ok
30174 907776 908288 ok
30175 908288 908800 ok
30176 908800 909312 ok
30177 909312 909824 ok
30178 909824 910336 ok
30179 910336 910848 ok
30180 910848 911360 ok
30181 911360 911872 ok
30182 911872 912384 ok
30183 912384 912896 ok
30184 912896 913408 ok
30185 913408 913920 ok
30186 913920 914432 ok
30187 914432 914944 ok
30188 914944 915456 ok
30189 915456 915968 ok
30190 915968 916480 ok
30191 916480 916992 ok
30192 916992 917504 ok
30193 917504 918016 ok
30194 918016 918528 ok
30195 918528 919040 ok
30196 919040 919552 ok
30197 919552 920064 ok
30198 920064 920576 ok
30199 920576 921088 ok
30200 921088 921600 ok
30201 921600 922112 ok
30202 922112 922624 ok
30203 922624 923136 ok
30204 923136 923648 ok
30205 923648 924160 ok
30206 924160 924672 ok
30207 924672 925184 ok
30208 925184 925696 ok
30209 925696 926208 ok
30210 926208 926720 ok
30211 926720 927232 ok
30212 927232 927744 ok
30213 927744 928256 ok
30214 928256 928768 ok
30215 928768 929280 ok
30216 929280 929792 ok
30217 929792 930304 ok
30218 930304 930816 ok
30219 930816 931328 ok
30220 931328 931840 ok
30221 931840 932352 ok
30222 932352 932864 ok
30223 932864 933376 ok
30224 933376 933888 ok
30225 933888 934400 ok
30226 934400 934912 ok
30227 934912 935424 ok
30228 935424 935936 ok
30229 935936 936448 ok
30230 936448 936960 ok
30231 936960 937472 ok
30232 937472 937984 ok
30233 937984 938496 ok
30234 938496 939008 ok
30235 939008 939520 ok
30236 939520 940032 ok
30237 940032 940544 ok
30238 940544 941056 ok
30239 941056 941568 ok
30240 941568 942080 ok
30241 942080 942592 ok
30242 942592 943104 ok
30243 943104 943616 ok
30244 943616 944128 ok
30245 944128 944640 ok
30246 944640 945152 ok
30247 945152 945664 ok
30248 945664 946176 ok
30249 946176 946688 ok
30250 946688 947200 ok
30251 947200 947712 ok
30252 947712 948224 ok
30253 948224 948736 ok
30254 948736 949248 ok
30255 949248 949760 ok
30256 949760 950272 ok
30257 950272 950784 ok
30258 950784 951296 ok
30259 951296 951808 ok
30260 951808 952320 ok
30261 952320 952832 ok
30262 952832 953344 ok
30263 953344 953856 ok
30264 953856 954368 ok
30265 954368 954880 ok
30266 954880 955392 ok
30267 955392 955904 ok
30268 955904 956416 ok
30269 956416 956928 ok
30270 956928 957440 ok
30271 957440 957952 ok
30272 957952 958464 ok
30273 958464 958976 ok
30274 958976 959488 ok
30275 959488 960000 ok
30276 960000 960512 ok
30277 960512 961024 ok
30278 961024 961536 ok
30279 961536 962048 ok
30280 962048 962560 ok
30281 962560 963072 ok
30282 963072 963584 ok
30283 963584 964096 ok
30284 964096 964608 ok
30285 964608 965120 ok
30286 965120 965632 ok
30287 965632 966144 ok
30288 966144 966656 ok
30289 966656 967168 ok
30290 967168 967680 ok
30291 967680 968192 ok
30292 968192 968704 ok
30293 968704 969216 ok
30294 969216 969728 ok
30295 969728 970240 ok
30296 970240 970752 ok
30297 970752 971264 ok
30298 971264 971776 ok
30299 971776 972288 ok
30300 972288 972800 ok
30301 972800 973312 ok
30302 973312 973824 ok
30303 973824 974336 ok
30304 974336 974848 ok
30305 974848 975360 ok
30306 975360 975872 ok
30307 975872 976384 ok
30308 976384 976896 ok
30309 976896 977408 ok
30310 977408 977920 ok
30311 977920 978432 ok
30312 978432 978944 ok
30313 978944 979456 ok
30314 979456 979968 ok
30315 979968 980480 ok
30316 980480 980992 ok
30317 980992 981504 ok
30318 981504 982016 ok
30319 982016 982528 ok
30320 982528 983040 ok
30321 983040 983552 ok
30322 983552 984064 ok
30323 984064 984576 ok
30324 984576 985088 ok
30325 985088 985600 ok
30326 985600 986112 ok
30327 986112 986624 ok
30328 986624 987136 ok
30329 987136 987648 ok
30330 987648 988160 ok
30331 988160 988672 ok
30332 988672 989184 ok
30333 989184 989696 ok
30334 989696 990208 ok
30335 990208 990720 ok
30336 990720 991232 ok
30337 991232 991744 ok
30338 991744 992256 ok
30339 992256 992768 ok
30340 992768 993280 ok
30341 993280 993792 ok
30342 993792 994304 ok
30343 994304 994816 ok
30344 994816 995328 ok
30345 995328 995840 ok
30346 995840 996352 ok
30347 996352 996864 ok
30348 996864 997376 ok
30349 997376 997888 ok
30350 997888 998400 ok
30351 998400 998912 ok
30352 998912 999424 ok
30353 999424 999936 ok
30354 999936 1000448 ok
30355 1000448 1000960 ok
30356 1000960 1001472 ok
30357 1001472 1001984 ok
30358 1001984 1002496 ok
30359 1002496 1003008 ok
30360 1003008 1003520 ok
30361 1003520 1004032 ok
30362 1004032 1004544 ok
30363 1004544 1005056 ok
30364 1005056 1005568 ok
30365 1005568 1006080 ok
30366 1006080 1006592 ok
30367 1006592 1007104 ok
30368 1007104 1007616 ok
30369 1007616 1008128 ok
30370 1008128 1008640 ok
30371 1008640 1009152 ok
30372 1009152 1009664 ok
30373 1009664 1010176 ok
30374 1010176 1010688 ok
30375 1010688 1011200 ok
30376 1011200 1011712 ok
30377 1011712 1012224 ok
30378 1012224 1012736 ok
30379 1012736 1013248 ok
30380 1013248 1013760 ok
30381 1013760 1014272 ok
30382 1014272 1014784 ok
30383 1014784 1015296 ok
30384 1015296 1015808 ok
30385 1015808 1016320 ok
30386 1016320 1016832 ok
30387 1016832 1017344 ok
30388 1017344 1017856 ok
30389 1017856 1018368 ok
30390 1018368 1018880 ok
30391 1018880 1019392 ok
30392 1019392 1019904 ok
30393 1019904 1020416 ok
30394 1020416 1020928 ok
30395 1020928 1021440 ok
30396 1021440 1021952 ok
30397 1021952 1022464 ok
30398 1022464 1022976 ok
30399 1022976 1023488 ok
30400 1023488 1024000 ok
30401 1024000 1024512 ok
30402 1024512 1025024 ok
30403 1025024 1025536 ok
30404 1025536 1026048 ok
30405 1026048 1026560 ok
30406 1026560 1027072 ok
30407 1027072 1027584 ok
30408 1027584 1028096 ok
30409 1028096 1028608 ok
30410 1028608 1029120 ok
30411 1029120 1029632 ok
30412 1029632 1030144 ok
30413 1030144 1030656 ok
30414 1030656 1031168 ok
30415 1031168 1031680 ok
30416 1031680 1032192 ok
30417 1032192 1032704 ok
30418 1032704 1033216 ok
30419 1033216 1033728 ok
30420 1033728 1034240 ok
30421 1034240 1034752 ok
30422 1034752 1035264 ok
30423 1035264 1035776 ok
30424 1035776 1036288 ok
30425 1036288 1036800 ok
30426 1036800 1037312 ok
30427 1037312 1037824 ok
30428 1037824 1038336 ok
30429 1038336 1038848 ok
30430 1038848 1039360 ok
30431 1039360 1039872 ok
30432 1039872 1040384 ok
30433 1040384 1040896 ok
30434 1040896 1041408 ok
30435 1041408 1041920 ok
30436 1041920 1042432 ok
30437 1042432 1042944 ok
30438 1042944 1043456 ok
30439 1043456 1043968 ok
30440 1043968 1044480 ok
30441 1044480 1044992 ok
30442 1044992 1045504 ok
30443 1045504 1046016 ok
30444 1046016 1046528 ok
30445 1046528 1047040 ok
30446 1047040 1047552 ok
30447 1047552 1048064 ok
30448 1048064 1048576 ok
30449 1048576 1049088 ok
30450 1049088 1049600 ok
30451 1049600 1050112 ok
30452 1050112 1050624 ok
30453 1050624 1051136 ok
30454 1051136 1051648 ok
30455 1051648 1052160 ok
30456 1052160 1052672 ok
30457 1052672 1053184 ok
30458 1053184 1053696 ok
30459 1053696 1054208 ok
30460 1054208 1054720 ok
30461 1054720 1055232 ok
30462 1055232 1055744 ok
30463 1055744 1056256 ok
30464 1056256 1056768 ok
30465 1056768 1057280 ok
30466 1057280 1057792 ok
30467 1057792 1058304 ok
30468 1058304 1058816 ok
30469 1058816 1059328 ok
30470 1059328 1059840 ok
30471 1059840 1060352 ok
30472 1060352 1060864 ok
30473 1060864 1061376 ok
30474 1061376 1061888 ok
30475 1061888 1062400 ok
30476 1062400 1062912 ok
30477 1062912 1063424 ok
30478 1063424 1063936 ok
30479 1063936 1064448 ok
30480 1064448 1064960 ok
30481 1064960 1065472 ok
30482 1065472 1065984 ok
30483 1065984 1066496 ok
30484 1066496 1067008 ok
30485 1067008 1067520 ok
30486 1067520 1068032 ok
30487 1068032 1068544 ok
30488 1068544 1069056 ok
30489 1069056 1069568 ok
30490 1069568 1070080 ok
30491 1070080 1070592 ok
30492 1070592 1071104 ok
30493 1071104 1071616 ok
30494 1071616 1072128 ok
30495 1072128 1072640 ok
30496 1072640 1073152 ok
30497 1073152 1073664 ok
30498 1073664 1074176 ok
30499 1074176 1074688 ok
30500 1074688 1075200 ok
30501 1075200 1075712 ok
30502 1075712 1076224 ok
30503 1076224 1076736 ok
30504 1076736 1077248 ok
30505 1077248 1077760 ok
30506 1077760 1078272 ok
30507 1078272 1078784 ok
30508 1078784 1079296 ok
30509 1079296 1079808 ok
30510 1079808 1080320 ok
30511 1080320 1080832 ok
30512 1080832 1081344 ok
30513 1081344 1081856 ok
30514 1081856 1082368 ok
30515 1082368 1082880 ok
30516 1082880 1083392 ok
30517 1083392 1083904 ok
30518 1083904 1084416 ok
30519 1084416 1084928 ok
30520 1084928 1085440 ok
30521 1085440 1085952 ok
30522 1085952 1086464 ok
30523 1086464 1086976 ok
30524 1086976 1087488 ok
30525 1087488 1088000 ok
30526 1088000 1088512 ok
30527 1088512 1089024 ok
30528 1089024 1089536 ok
30529 1089536 1090048 ok
30530 1090048 1090560 ok
30531 1090560 1091072 ok
30532 1091072 1091584 ok
30533 1091584 1092096 ok
30534 1092096 1092608 ok
30535 1092608 1093120 ok
30536 1093120 1093632 ok
30537 1093632 1094144 ok
30538 1094144 1094656 ok
30539 1094656 1095168 ok
30540 1095168 1095680 ok
30541 1095680 1096192 ok
30542 1096192 1096704 ok
30543 1096704 1097216 ok
30544 1097216 1097728 ok
30545 1097728 1098240 ok
30546 1098240 1098752 ok
30547 1098752 1099264 ok
30548 1099264 1099776 ok
30549 1099776 1100288 ok
30550 1100288 1100800 ok
30551 1100800 1101312 ok
30552 1101312 1101824 ok
30553 1101824 1102336 ok
30554 1102336 1102848 ok
30555 1102848 1103360 ok
30556 1103360 1103872 ok
30557 1103872 1104384 ok
30558 1104384 1104896 ok
30559 1104896 1105408 ok
30560 1105408 1105920 ok
30561 1105920 1106432 ok
30562 1106432 1106944 ok
30563 1106944 1107456 ok
30564 1107456 1107968 ok
30565 1107968 1108480 ok
30566 1108480 1108992 ok
30567 1108992 1109504 ok
30568 1109504 1110016 ok
30569 1110016 1110528 ok
30570 1110528 1111040 ok
30571 1111040 1111552 ok
30572 1111552 1112064 ok
30573 1112064 1112576 ok
30574 1112576 1113088 ok
30575 1113088 1113600 ok
30576 1113600 1114112 ok
30577 1114112 1114624 ok
30578 1114624 1115136 ok
30579 1115136 1115648 ok
30580 1115648 1116160 ok
30581 1116160 1116672 ok
30582 1116672 1117184 ok
30583 1117184 1117696 ok
30584 1117696 1118208 ok
30585 1118208 1118720 ok
30586 1118720 1119232 ok
30587 1119232 1119744 ok
30588 1119744 1120256 ok
30589 1120256 1120768 ok
30590 1120768 1121280 ok
30591 1121280 1121792 ok
30592 1121792 1122304 ok
30593 1122304 1122816 ok
30594 1122816 1123328 ok
30595 1123328 1123840 ok
30596 1123840 1124352 ok
30597 1124352 1124864 ok
30598 1124864 1125376 ok
30599 1125376 1125888 ok
30600 1125888 1126400 ok
30601 1126400 1126912 ok
30602 1126912 1127424 ok
30603 1127424 1127936 ok
30604 1127936 1128448 ok
30605 1128448 1128960 ok
30606 1128960 1129472 ok
30607 1129472 1129984 ok
30608 1129984 1130496 ok
30609 1130496 1131008 ok
30610 1131008 1131520 ok
30611 1131520 1132032 ok
30612 1132032 1132544 ok
30613 1132544 1133056 ok
30614 1133056 1133568 ok
30615 1133568 1134080 ok
30616 1134080 1134592 ok
30617 1134592 1135104 ok
30618 1135104 1135616 ok
30619 1135616 1136128 ok
30620 1136128 1136640 ok
30621 1136640 1137152 ok
30622 1137152 1137664 ok
30623 1137664 1138176 ok
30624 1138176 1138688 ok
30625 1138688 1139200 ok
30626 1139200 1139712 ok
30627 1139712 1140224 ok
30628 1140224 1140736 ok
30629 1140736 1141248 ok
30630 1141248 1141760 ok
30631 1141760 1142272 ok
30632 1142272 1142784 ok
30633 1142784 1143296 ok
30634 1143296 1143808 ok
30635 1143808 1144320 ok
30636 1144320 1144832 ok
30637 1144832 1145344 ok
30638 1145344 1145856 ok
30639 1145856 1146368 ok
30640 1146368 1146880 ok
30641 1146880 1147392 ok
30642 1147392 1147904 ok
30643 1147904 1148416 ok
30644 1148416 1148928 ok
30645 1148928 1149440 ok
30646 1149440 1149952 ok
30647 1149952 1150464 ok
30648 1150464 1150976 ok
30649 1150976 1151488 ok
30650 1151488 1152000 ok
30651 1152000 1152512 ok
30652 1152512 1153024 ok
30653 1153024 1153536 ok
30654 1153536 1154048 ok
30655 1154048 1154560 ok
30656 1154560 1155072 ok
30657 1155072 1155584 ok
30658 1155584 1156096 ok
30659 1156096 1156608 ok
30660 1156608 1157120 ok
30661 1157120 1157632 ok
30662 1157632 1158144 ok
30663 1158144 1158656 ok
30664 1158656 1159168 ok
30665 1159168 1159680 ok
30666 1159680 1160192 ok
30667 1160192 1160704 ok
30668 1160704 1161216 ok
30669 1161216 1161728 ok
30670 1161728 1162240 ok
30671 1162240 1162752 ok
30672 1162752 1163264 ok
30673 1163264 1163776 ok
30674 1163776 1164288 ok
30675 1164288 1164800 ok
30676 1164800 1165312 ok
30677 1165312 1165824 ok
30678 1165824 1166336 ok
30679 1166336 1166848 ok
30680 1166848 1167360 ok
30681 1167360 1167872 ok
30682 1167872 1168384 ok
30683 1168384 1168896 ok
30684 1168896 1169408 ok
30685 1169408 1169920 ok
30686 1169920 1170432 ok
30687 1170432 1170944 ok
30688 1170944 1171456 ok
30689 1171456 1171968 ok
30690 1171968 1172480 ok
30691 1172480 1172992 ok
30692 1172992 1173504 ok
30693 1173504 1174016 ok
30694 1174016 1174528 ok
30695 1174528 1175040 ok
30696 1175040 1175552 ok
30697 1175552 1176064 ok
30698 1176064 1176576 ok
30699 1176576 1177088 ok
30700 1177088 1177600 ok
30701 1177600 1178112 ok
30702 1178112 1178624 ok
30703 1178624 1179136 ok
30704 1179136 1179648 ok
30705 1179648 1180160 ok
30706 1180160 1180672 ok
30707 1180672 1181184 ok
30708 1181184 1181696 ok
30709 1181696 1182208 ok
30710 1182208 1182720 ok
30711 1182720 1183232 ok
30712 1183232 1183744 ok
30713 1183744 1184256 ok
30714 1184256 1184768 ok
30715 1184768 1185280 ok
30716 1185280 1185792 ok
30717 1185792 1186304 ok
30718 1186304 1186816 ok
30719 1186816 1187328 ok
30720 1187328 1187840 ok
30721 1187840 1188352 ok
30722 1188352 1188864 ok
30723 1188864 1189376 ok
30724 1189376 1189888 ok
30725 1189888 1190400 ok
30726 1190400 1190912 ok
30727 1190912 1191424 ok
30728 1191424 1191936 ok
30729 1191936 1192448 ok
30730 1192448 1192960 ok
30731 1192960 1193472 ok
30732 1193472 1193984 ok
30733 1193984 1194496 ok
30734 1194496 1195008 ok
30735 1195008 1195520 ok
30736 1195520 1196032 ok
30737 1196032 1196544 ok
30738 1196544 1197056 ok
30739 1197056 1197568 ok
30740 1197568 1198080 ok
30741 1198080 1198592 ok
30742 1198592 1199104 ok
30743 1199104 1199616 ok
30744 1199616 1200128 ok
30745 1200128 1200640 ok
30746 1200640 1201152 ok
30747 1201152 1201664 ok
30748 1201664 1202176 ok
30749 1202176 1202688 ok
30750 1202688 1203200 ok
30751 1203200 1203712 ok
30752 1203712 1204224 ok
30753 1204224 1204736 ok
30754 1204736 1205248 ok
30755 1205248 1205760 ok
30756 1205760 1206272 ok
30757 1206272 1206784 ok
30758 1206784 1207296 ok
30759 1207296 1207808 ok
30760 1207808 1208320 ok
30761 1208320 1208832 ok
30762 1208832 1209344 ok
30763 1209344 1209856 ok
30764 1209856 1210368 ok
30765 1210368 1210880 ok
30766 1210880 1211392 ok
30767 1211392 1211904 ok
30768 1211904 1212416 ok
30769 1212416 1212928 ok
30770 1212928 1213440 ok
30771 1213440 1213952 ok
30772 1213952 1214464 ok
30773 1214464 1214976 ok
30774 1214976 1215488 ok
30775 1215488 1216000 ok
30776 1216000 1216512 ok
30777 1216512 1217024 ok
30778 1217024 1217536 ok
30779 1217536 1218048 ok
30780 1218048 1218560 ok
30781 1218560 1219072 ok
30782 1219072 1219584 ok
30783 1219584 1220096 ok
30784 1220096 1220608 ok
30785 1220608 1221120 ok
30786 1221120 1221632 ok
30787 1221632 1222144 ok
30788 1222144 1222656 ok
30789 1222656 1223168 ok
30790 1223168 1223680 ok
30791 1223680 1224192 ok
30792 1224192 1224704 ok
30793 1224704 1225216 ok
30794 1225216 1225728 ok
30795 1225728 1226240 ok
30796 1226240 1226752 ok
30797 1226752 1227264 ok
30798 1227264 1227776 ok
30799 1227776 1228288 ok
30800 1228288 1228800 ok
30801 1228800 1229312 ok
30802 1229312 1229824 ok
30803 1229824 1230336 ok
30804 1230336 1230848 ok
30805 1230848 1231360 ok
30806 1231360 1231872 ok
30807 1231872 1232384 ok
30808 1232384 1232896 ok
30809 1232896 1233408 ok
30810 1233408 1233920 ok
30811 1233920 1234432 ok
30812 1234432 1234944 ok
30813 1234944 1235456 ok
30814 1235456 1235968 ok
30815 1235968 1236480 ok
30816 1236480 1236992 ok
30817 1236992 1237504 ok
30818 1237504 1238016 ok
30819 1238016 1238528 ok
30820 1238528 1239040 ok
30821 1239040 1239552 ok
30822 1239552 1240064 ok
30823 1240064 1240576 ok
30824 1240576 1241088 ok
30825 1241088 1241600 ok
30826 1241600 1242112 ok
30827 1242112 1242624 ok
30828 1242624 1243136 ok
30829 1243136 1243648 ok
30830 1243648 1244160 ok
30831 1244160 1244672 ok
30832 1244672 1245184 ok
30833 1245184 1245696 ok
30834 1245696 1246208 ok
30835 1246208 1246720 ok
30836 1246720 1247232 ok
30837 1247232 1247744 ok
30838 1247744 1248256 ok
30839 1248256 1248768 ok
30840 1248768 1249280 ok
30841 1249280 1249792 ok
30842 1249792 1250304 ok
30843 1250304 1250816 ok
30844 1250816 1251328 ok
30845 1251328 1251840 ok
30846 1251840 1252352 ok
30847 1252352 1252864 ok
30848 1252864 1253376 ok
30849 1253376 1253888 ok
30850 1253888 1254400 ok
30851 1254400 1254912 ok
30852 1254912 1255424 ok
30853 1255424 1255936 ok
30854 1255936 1256448 ok
30855 1256448 1256960 ok
30856 1256960 1257472 ok
30857 1257472 1257984 ok
30858 1257984 1258496 ok
30859 1258496 1259008 ok
30860 1259008 1259520 ok
30861 1259520 1260032 ok
30862 1260032 1260544 ok
30863 1260544 1261056 ok
30864 1261056 1261568 ok
30865 1261568 1262080 ok
30866 1262080 1262592 ok
30867 1262592 1263104 ok
30868 1263104 1263616 ok
30869 1263616 1264128 ok
30870 1264128 1264640 ok
30871 1264640 1265152 ok
30872 1265152 1265664 ok
30873 1265664 1266176 ok
30874 1266176 1266688 ok
30875 1266688 1267200 ok
30876 1267200 1267712 ok
30877 1267712 1268224 ok
30878 1268224 1268736 ok
30879 1268736 1269248 ok
30880 1269248 1269760 ok
30881 1269760 1270272 ok
30882 1270272 1270784 ok
30883 1270784 1271296 ok
30884 1271296 1271808 ok
30885 1271808 1272320 ok
30886 1272320 1272832 ok
30887 1272832 1273344 ok
30888 1273344 1273856 ok
30889 1273856 1274368 ok
30890 1274368 1274880 ok
30891 1274880 1275392 ok
30892 1275392 1275904 ok
30893 1275904 1276416 ok
30894 1276416 1276928 ok
30895 1276928 1277440 ok
30896 1277440 1277952 ok
30897 1277952 1278464 ok
30898 1278464 1278976 ok
30899 1278976 1279488 ok
30900 1279488 1280000 ok
30901 1280000 1280512 ok
30902 1280512 1281024 ok
30903 1281024 1281536 ok
30904 1281536 1282048 ok
30905 1282048 1282560 ok
30906 1282560 1283072 ok
30907 1283072 1283584 ok
30908 1283584 1284096 ok
30909 1284096 1284608 ok
30910 1284608 1285120 ok
30911 1285120 1285632 ok
30912 1285632 1286144 ok
30913 1286144 1286656 ok
30914 1286656 1287168 ok
30915 1287168 1287680 ok
30916 1287680 1288192 ok
30917 1288192 1288704 ok
30918 1288704 1289216 ok
30919 1289216 1289728 ok
30920 1289728 1290240 ok
30921 1290240 1290752 ok
30922 1290752 1291264 ok
30923 1291264 1291776 ok
30924 1291776 1292288 ok
30925 1292288 1292800 ok
30926 1292800 1293312 ok
30927 1293312 1293824 ok
30928 1293824 1294336 ok
30929 1294336 1294848 ok
30930 1294848 1295360 ok
30931 1295360 1295872 ok
30932 1295872 1296384 ok
30933 1296384 1296896 ok
30934 1296896 1297408 ok
30935 1297408 1297920 ok
30936 1297920 1298432 ok
30937 1298432 1298944 ok
30938 1298944 1299456 ok
30939 1299456 1299968 ok
30940 1299968 1300480 ok
30941 1300480 1300992 ok
30942 1300992 1301504 ok
30943 1301504 1302016 ok
30944 1302016 1302528 ok
30945 1302528 1303040 ok
30946 1303040 1303552 ok
30947 1303552 1304064 ok
30948 1304064 1304576 ok
30949 1304576 1305088 ok
30950 1305088 1305600 ok
30951 1305600 1306112 ok
30952 1306112 1306624 ok
30953 1306624 1307136 ok
30954 1307136 1307648 ok
30955 1307648 1308160 ok
30956 1308160 1308672 ok
30957 1308672 1309184 ok
30958 1309184 1309696 ok
30959 1309696 1310208 ok
30960 1310208 1310720 ok
30961 1310720 1311232 ok
30962 1311232 1311744 ok
30963 1311744 1312256 ok
30964 1312256 1312768 ok
30965 1312768 1313280 ok
30966 1313280 1313792 ok
30967 1313792 1314304 ok
30968 1314304 1314816 ok
30969 1314816 1315328 ok
30970 1315328 1315840 ok
30971 1315840 1316352 ok
30972 1316352 1316864 ok
30973 1316864 1317376 ok
30974 1317376 1317888 ok
30975 1317888 1318400 ok
30976 1318400 1318912 ok
30977 1318912 1319424 ok
30978 1319424 1319936 ok
30979 1319936 1320448 ok
30980 1320448 1320960 ok
30981 1320960 1321472 ok
30982 1321472 1321984 ok
30983 1321984 1322496 ok
30984 1322496 1323008 ok
30985 1323008 1323520 ok
30986 1323520 1324032 ok
30987 1324032 1324544 ok
30988 1324544 1325056 ok
30989 1325056 1325568 ok
30990 1325568 1326080 ok
30991 1326080 1326592 ok
30992 1326592 1327104 ok
30993 1327104 1327616 ok
30994 1327616 1328128 ok
30995 1328128 1328640 ok
30996 1328640 1329152 ok
30997 1329152 1329664 ok
30998 1329664 1330176 ok
30999 1330176 1330688 ok
31000 1330688 1331200 ok
31001 1331200 1331712 ok
31002 1331712 1332224 ok
31003 1332224 1332736 ok
31004 1332736 1333248 ok
31005 1333248 1333760 ok
31006 1333760 1334272 ok
31007 1334272 1334784 ok
31008 1334784 1335296 ok
31009 1335296 1335808 ok
31010 1335808 1336320 ok
31011 1336320 1336832 ok
31012 1336832 1337344 ok
31013 1337344 1337856 ok
31014 1337856 1338368 ok
31015 1338368 1338880 ok
31016 1338880 1339392 ok
31017 1339392 1339904 ok
31018 1339904 1340416 ok
31019 1340416 1340928 ok
31020 1340928 1341440 ok
31021 1341440 1341952 ok
31022 1341952 1342464 ok
31023 1342464 1342976 ok
31024 1342976 1343488 ok
31025 1343488 1344000 ok
31026 1344000 1344512 ok
31027 1344512 1345024 ok
31028 1345024 1345536 ok
31029 1345536 1346048 ok
31030 1346048 1346560 ok
31031 1346560 1347072 ok
31032 1347072 1347584 ok
31033 1347584 1348096 ok
31034 1348096 1348608 ok
31035 1348608 1349120 ok
31036 1349120 1349632 ok
31037 1349632 1350144 ok
31038 1350144 1350656 ok
31039 1350656 1351168 ok
31040 1351168 1351680 ok
31041 1351680 1352192 ok
31042 1352192 1352704 ok
31043 1352704 1353216 ok
31044 1353216 1353728 ok
31045 1353728 1354240 ok
31046 1354240 1354752 ok
31047 1354752 1355264 ok
31048 1355264 1355776 ok
31049 1355776 1356288 ok
31050 1356288 1356800 ok
31051 1356800 1357312 ok
31052 1357312 1357824 ok
31053 1357824 1358336 ok
31054 1358336 1358848 ok
31055 1358848 1359360 ok
31056 1359360 1359872 ok
31057 1359872 1360384 ok
31058 1360384 1360896 ok
31059 1360896 1361408 ok
31060 1361408 1361920 ok
31061 1361920 1362432 ok
31062 1362432 1362944 ok
31063 1362944 1363456 ok
31064 1363456 1363968 ok
31065 1363968 1364480 ok
31066 1364480 1364992 ok
31067 1364992 1365504 ok
31068 1365504 1366016 ok
31069 1366016 1366528 ok
31070 1366528 1367040 ok
31071 1367040 1367552 ok
31072 1367552 1368064 ok
31073 1368064 1368576 ok
31074 1368576 1369088 ok
31075 1369088 1369600 ok
31076 1369600 1370112 ok
31077 1370112 1370624 ok
31078 1370624 1371136 ok
31079 1371136 1371648 ok
31080 1371648 1372160 ok
31081 1372160 1372672 ok
31082 1372672 1373184 ok
31083 1373184 1373696 ok
31084 1373696 1374208 ok
31085 1374208 1374720 ok
31086 1374720 1375232 ok
31087 1375232 1375744 ok
31088 1375744 1376256 ok
31089 1376256 1376768 ok
31090 1376768 1377280 ok
31091 1377280 1377792 ok
31092 1377792 1378304 ok
31093 1378304 1378816 ok
31094 1378816 1379328 ok
31095 1379328 1379840 ok
31096 1379840 1380352 ok
31097 1380352 1380864 ok
31098 1380864 1381376 ok
31099 1381376 1381888 ok
31100 1381888 1382400 ok
31101 1382400 1382912 ok
31102 1382912 1383424 ok
31103 1383424 1383936 ok
31104 1383936 1384448 ok
31105 1384448 1384960 ok
31106 1384960 1385472 ok
31107 1385472 1385984 ok
31108 1385984 1386496 ok
31109 1386496 1387008 ok
31110 1387008 1387520 ok
31111 1387520 1388032 ok
31112 1388032 1388544 ok
31113 1388544 1389056 ok
31114 1389056 1389568 ok
31115 1389568 1390080 ok
31116 1390080 1390592 ok
31117 1390592 1391104 ok
31118 1391104 1391616 ok
31119 1391616 1392128 ok
31120 1392128 1392640 ok
31121 1392640 1393152 ok
31122 1393152 1393664 ok
31123 1393664 1394176 ok
31124 1394176 1394688 ok
31125 1394688 1395200 ok
31126 1395200 1395712 ok
31127 1395712 1396224 ok
31128 1396224 1396736 ok
31129 1396736 1397248 ok
31130 1397248 1397760 ok
31131 1397760 1398272 ok
31132 1398272 1398784 ok
31133 1398784 1399296 ok
31134 1399296 1399808 ok
31135 1399808 1400320 ok
31136 1400320 1400832 ok
31137 1400832 1401344 ok
31138 1401344 1401856 ok
31139 1401856 1402368 ok
31140 1402368 1402880 ok
31141 1402880 1403392 ok
31142 1403392 1403904 ok
31143 1403904 1404416 ok
31144 1404416 1404928 ok
31145 1404928 1405440 ok
31146 1405440 1405952 ok
31147 1405952 1406464 ok
31148 1406464 1406976 ok
31149 1406976 1407488 ok
31150 1407488 1408000 ok
31151 1408000 1408512 ok
31152 1408512 1409024 ok
31153 1409024 1409536 ok
31154 1409536 1410048 ok
31155 1410048 1410560 ok
31156 1410560 1411072 ok
31157 1411072 1411584 ok
31158 1411584 1412096 ok
31159 1412096 1412608 ok
31160 1412608 1413120 ok
31161 1413120 1413632 ok
31162 1413632 1414144 ok
31163 1414144 1414656 ok
31164 1414656 1415168 ok
31165 1415168 1415680 ok
31166 1415680 1416192 ok
31167 1416192 1416704 ok
31168 1416704 1417216 ok
31169 1417216 1417728 ok
31170 1417728 1418240 ok
31171 1418240 1418752 ok
31172 1418752 1419264 ok
31173 1419264 1419776 ok
31174 1419776 1420288 ok
31175 1420288 1420800 ok
31176 1420800 1421312 ok
31177 1421312 1421824 ok
31178 1421824 1422336 ok
31179 1422336 1422848 ok
31180 1422848 1423360 ok
31181 1423360 1423872 ok
31182 1423872 1424384 ok
31183 1424384 1424896 ok
31184 1424896 1425408 ok
31185 1425408 1425920 ok
31186 1425920 1426432 ok
31187 1426432 1426944 ok
31188 1426944 1427456 ok
31189 1427456 1427968 ok
31190 1427968 1428480 ok
31191 1428480 1428992 ok
31192 1428992 1429504 ok
31193 1429504 1430016 ok
31194 1430016 1430528 ok
31195 1430528 1431040 ok
31196 1431040 1431552 ok
31197 1431552 1432064 ok
31198 1432064 1432576 ok
31199 1432576 1433088 ok
31200 1433088 1433600 ok
31201 1433600 1434112 ok
31202 1434112 1434624 ok
31203 1434624 1435136 ok
31204 1435136 1435648 ok
31205 1435648 1436160 ok
31206 1436160 1436672 ok
31207 1436672 1437184 ok
31208 1437184 1437696 ok
31209 1437696 1438208 ok
31210 1438208 1438720 ok
31211 1438720 1439232 ok
31212 1439232 1439744 ok
31213 1439744 1440256 ok
31214 1440256 1440768 ok
31215 1440768 1441280 ok
31216 1441280 1441792 ok
31217 1441792 1442304 ok
31218 1442304 1442816 ok
31219 1442816 1443328 ok
31220 1443328 1443840 ok
31221 1443840 1444352 ok
31222 1444352 1444864 ok
31223 1444864 1445376 ok
31224 1445376 1445888 ok
31225 1445888 1446400 ok
31226 1446400 1446912 ok
31227 1446912 1447424 ok
31228 1447424 1447936 ok
31229 1447936 1448448 ok
31230 1448448 1448960 ok
31231 1448960 1449472 ok
31232 1449472 1449984 ok
31233 1449984 1450496 ok
31234 1450496 1451008 ok
31235 1451008 1451520 ok
31236 1451520 1452032 ok
31237 1452032 1452544 ok
31238 1452544 1453056 ok
31239 1453056 1453568 ok
31240 1453568 1454080 ok
31241 1454080 1454592 ok
31242 1454592 1455104 ok
31243 1455104 1455616 ok
31244 1455616 1456128 ok
31245 1456128 1456640 ok
31246 1456640 1457152 ok
31247 1457152 1457664 ok
31248 1457664 1458176 ok
31249 1458176 1458688 ok
31250 1458688 1459200 ok
31251 1459200 1459712 ok
31252 1459712 1460224 ok
31253 1460224 1460736 ok
31254 1460736 1461248 ok
31255 1461248 1461760 ok
31256 1461760 1462272 ok
31257 1462272 1462784 ok
31258 1462784 1463296 ok
31259 1463296 1463808 ok
31260 1463808 1464320 ok
31261 1464320 1464832 ok
31262 1464832 1465344 ok
31263 1465344 1465856 ok
31264 1465856 1466368 ok
31265 1466368 1466880 ok
31266 1466880 1467392 ok
31267 1467392 1467904 ok
31268 1467904 1468416 ok
31269 1468416 1468928 ok
31270 1468928 1469440 ok
31271 1469440 1469952 ok
31272 1469952 1470464 ok
31273 1470464 1470976 ok
31274 1470976 1471488 ok
31275 1471488 1472000 ok
31276 1472000 1472512 ok
31277 1472512 1473024 ok
31278 1473024 1473536 ok
31279 1473536 1474048 ok
31280 1474048 1474560 ok
31281 1474560 1475072 ok
31282 1475072 1475584 ok
31283 1475584 1476096 ok
31284 1476096 1476608 ok
31285 1476608 1477120 ok
31286 1477120 1477632 ok
31287 1477632 1478144 ok
31288 1478144 1478656 ok
31289 1478656 1479168 ok
31290 1479168 1479680 ok
31291 1479680 1480192 ok
31292 1480192 1480704 ok
31293 1480704 1481216 ok
31294 1481216 1481728 ok
31295 1481728 1482240 ok
31296 1482240 1482752 ok
31297 1482752 1483264 ok
31298 1483264 1483776 ok
31299 1483776 1484288 ok
31300 1484288 1484800 ok
31301 1484800 1485312 ok
31302 1485312 1485824 ok
31303 1485824 1486336 ok
31304 1486336 1486848 ok
31305 1486848 1487360 ok
31306 1487360 1487872 ok
31307 1487872 1488384 ok
31308 1488384 1488896 ok
31309 1488896 1489408 ok
31310 1489408 1489920 ok
31311 1489920 1490432 ok
31312 1490432 1490944 ok
31313 1490944 1491456 ok
31314 1491456 1491968 ok
31315 1491968 1492480 ok
31316 1492480 1492992 ok
31317 1492992 1493504 ok
31318 1493504 1494016 ok
31319 1494016 1494528 ok
31320 1494528 1495040 ok
31321 1495040 1495552 ok
31322 1495552 1496064 ok
31323 1496064 1496576 ok
31324 1496576 1497088 ok
31325 1497088 1497600 ok
31326 1497600 1498112 ok
31327 1498112 1498624 ok
31328 1498624 1499136 ok
31329 1499136 1499648 ok
31330 1499648 1500160 ok
31331 1500160 1500672 ok
31332 1500672 1501184 ok
31333 1501184 1501696 ok
31334 1501696 1502208 ok
31335 1502208 1502720 ok
31336 1502720 1503232 ok
31337 1503232 1503744 ok
31338 1503744 1504256 ok
31339 1504256 1504768 ok
31340 1504768 1505280 ok
31341 1505280 1505792 ok
31342 1505792 1506304 ok
31343 1506304 1506816 ok
31344 1506816 1507328 ok
31345 1507328 1507840 ok
31346 1507840 1508352 ok
31347 1508352 1508864 ok
31348 1508864 1509376 ok
31349 1509376 1509888 ok
31350 1509888 1510400 ok
31351 1510400 1510912 ok
31352 1510912 1511424 ok
31353 1511424 1511936 ok
31354 1511936 1512448 ok
31355 1512448 1512960 ok
31356 1512960 1513472 ok
31357 1513472 1513984 ok
31358 1513984 1514496 ok
31359 1514496 1515008 ok
31360 1515008 1515520 ok
31361 1515520 1516032 ok
31362 1516032 1516544 ok
31363 1516544 1517056 ok
31364 1517056 1517568 ok
31365 1517568 1518080 ok
31366 1518080 1518592 ok
31367 1518592 1519104 ok
31368 1519104 1519616 ok
31369 1519616 1520128 ok
31370 1520128 1520640 ok
31371 1520640 1521152 ok
31372 1521152 1521664 ok
31373 1521664 1522176 ok
31374 1522176 1522688 ok
31375 1522688 1523200 ok
31376 1523200 1523712 ok
31377 1523712 1524224 ok
31378 1524224 1524736 ok
31379 1524736 1525248 ok
31380 1525248 1525760 ok
31381 1525760 1526272 ok
31382 1526272 1526784 ok
31383 1526784 1527296 ok
31384 1527296 1527808 ok
31385 1527808 1528320 ok
31386 1528320 1528832 ok
31387 1528832 1529344 ok
31388 1529344 1529856 ok
31389 1529856 1530368 ok
31390 1530368 1530880 ok
31391 1530880 1531392 ok
31392 1531392 1531904 ok
31393 1531904 1532416 ok
31394 1532416 1532928 ok
31395 1532928 1533440 ok
31396 1533440 1533952 ok
31397 1533952 1534464 ok
31398 1534464 1534976 ok
31399 1534976 1535488 ok
31400 1535488 1536000 ok
31401 1536000 1536512 ok
31402 1536512 1537024 ok
31403 1537024 1537536 ok
31404 1537536 1538048 ok
31405 1538048 1538560 ok
31406 1538560 1539072 ok
31407 1539072 1539584 ok
31408 1539584 1540096 ok
31409 1540096 1540608 ok
31410 1540608 1541120 ok
31411 1541120 1541632 ok
31412 1541632 1542144 ok
31413 1542144 1542656 ok
31414 1542656 1543168 ok
31415 1543168 1543680 ok
31416 1543680 1544192 ok
31417 1544192 1544704 ok
31418 1544704 1545216 ok
31419 1545216 1545728 ok
31420 1545728 1546240 ok
31421 1546240 1546752 ok
31422 1546752 1547264 ok
31423 1547264 1547776 ok
31424 1547776 1548288 ok
31425 1548288 1548800 ok
31426 1548800 1549312 ok
31427 1549312 1549824 ok
31428 1549824 1550336 ok
31429 1550336 1550848 ok
31430 1550848 1551360 ok
31431 1551360 1551872 ok
31432 1551872 1552384 ok
31433 1552384 1552896 ok
31434 1552896 1553408 ok
31435 1553408 1553920 ok
31436 1553920 1554432 ok
31437 1554432 1554944 ok
31438 1554944 1555456 ok
31439 1555456 1555968 ok
31440 1555968 1556480 ok
31441 1556480 1556992 ok
31442 1556992 1557504 ok
31443 1557504 1558016 ok
31444 1558016 1558528 ok
31445 1558528 1559040 ok
31446 1559040 1559552 ok
31447 1559552 1560064 ok
31448 1560064 1560576 ok
31449 1560576 1561088 ok
31450 1561088 1561600 ok
31451 1561600 1562112 ok
31452 1562112 1562624 ok
31453 1562624 1563136 ok
31454 1563136 1563648 ok
31455 1563648 1564160 ok
31456 1564160 1564672 ok
31457 1564672 1565184 ok
31458 1565184 1565696 ok
31459 1565696 1566208 ok
31460 1566208 1566720 ok
31461 1566720 1567232 ok
31462 1567232 1567744 ok
31463 1567744 1568256 ok
31464 1568256 1568768 ok
31465 1568768 1569280 ok
31466 1569280 1569792 ok
31467 1569792 1570304 ok
31468 1570304 1570816 ok
31469 1570816 1571328 ok
31470 1571328 1571840 ok
31471 1571840 1572352 ok
31472 1572352 1572864 ok
31473 1572864 1573376 ok
31474 1573376 1573888 ok
31475 1573888 1574400 ok
31476 1574400 1574912 ok
31477 1574912 1575424 ok
31478 1575424 1575936 ok
31479 1575936 1576448 ok
31480 1576448 1576960 ok
31481 1576960 1577472 ok
31482 1577472 1577984 ok
31483 1577984 1578496 ok
31484 1578496 1579008 ok
31485 1579008 1579520 ok
31486 1579520 1580032 ok
31487 1580032 1580544 ok
31488 1580544 1581056 ok
31489 1581056 1581568 ok
31490 1581568 1582080 ok
31491 1582080 1582592 ok
31492 1582592 1583104 ok
31493 1583104 1583616 ok
31494 1583616 1584128 ok
31495 1584128 1584640 ok
31496 1584640 1585152 ok
31497 1585152 1585664 ok
31498 1585664 1586176 ok
31499 1586176 1586688 ok
31500 1586688 1587200 ok
31501 1587200 1587712 ok
31502 1587712 1588224 ok
31503 1588224 1588736 ok
31504 1588736 1589248 ok
31505 1589248 1589760 ok
31506 1589760 1590272 ok
31507 1590272 1590784 ok
31508 1590784 1591296 ok
31509 1591296 1591808 ok
31510 1591808 1592320 ok
31511 1592320 1592832 ok
31512 1592832 1593344 ok
31513 1593344 1593856 ok
31514 1593856 1594368 ok
31515 1594368 1594880 ok
31516 1594880 1595392 ok
31517 1595392 1595904 ok
31518 1595904 1596416 ok
31519 1596416 1596928 ok
31520 1596928 1597440 ok
31521 1597440 1597952 ok
31522 1597952 1598464 ok
31523 1598464 1598976 ok
31524 1598976 1599488 ok
31525 1599488 1600000 ok
31526 1600000 1600512 ok
31527 1600512 1601024 ok
31528 1601024 1601536 ok
31529 1601536 1602048 ok
31530 1602048 1602560 ok
31531 1602560 1603072 ok
31532 1603072 1603584 ok
31533 1603584 1604096 ok
31534 1604096 1604608 ok
31535 1604608 1605120 ok
31536 1605120 1605632 ok
31537 1605632 1606144 ok
31538 1606144 1606656 ok
31539 1606656 1607168 ok
31540 1607168 1607680 ok
31541 1607680 1608192 ok
31542 1608192 1608704 ok
31543 1608704 1609216 ok
31544 1609216 1609728 ok
31545 1609728 1610240 ok
31546 1610240 1610752 ok
31547 1610752 1611264 ok
31548 1611264 1611776 ok
31549 1611776 1612288 ok
31550 1612288 1612800 ok
31551 1612800 1613312 ok
31552 1613312 1613824 ok
31553 1613824 1614336 ok
31554 1614336 1614848 ok
31555 1614848 1615360 ok
31556 1615360 1615872 ok
31557 1615872 1616384 ok
31558 1616384 1616896 ok
31559 1616896 1617408 ok
31560 1617408 1617920 ok
31561 1617920 1618432 ok
31562 1618432 1618944 ok
31563 1618944 1619456 ok
31564 1619456 1619968 ok
31565 1619968 1620480 ok
31566 1620480 1620992 ok
31567 1620992 1621504 ok
31568 1621504 1622016 ok
31569 1622016 1622528 ok
31570 1622528 1623040 ok
31571 1623040 1623552 ok
31572 1623552 1624064 ok
31573 1624064 1624576 ok
31574 1624576 1625088 ok
31575 1625088 1625600 ok
31576 1625600 1626112 ok
31577 1626112 1626624 ok
31578 1626624 1627136 ok
31579 1627136 1627648 ok
31580 1627648 1628160 ok
31581 1628160 1628672 ok
31582 1628672 1629184 ok
31583 1629184 1629696 ok
31584 1629696 1630208 ok
31585 1630208 1630720 ok
31586 1630720 1631232 ok
31587 1631232 1631744 ok
31588 1631744 1632256 ok
31589 1632256 1632768 ok
31590 1632768 1633280 ok
31591 1633280 1633792 ok
31592 1633792 1634304 ok
31593 1634304 1634816 ok
31594 1634816 1635328 ok
31595 1635328 1635840 ok
31596 1635840 1636352 ok
31597 1636352 1636864 ok
31598 1636864 1637376 ok
31599 1637376 1637888 ok
31600 1637888 1638400 ok
31601 1638400 1638912 ok
31602 1638912 1639424 ok
31603 1639424 1639936 ok
31604 1639936 1640448 ok
31605 1640448 1640960 ok
31606 1640960 1641472 ok
31607 1641472 1641984 ok
31608 1641984 1642496 ok
31609 1642496 1643008 ok
31610 1643008 1643520 ok
31611 1643520 1644032 ok
31612 1644032 1644544 ok
31613 1644544 1645056 ok
31614 1645056 1645568 ok
31615 1645568 1646080 ok
31616 1646080 1646592 ok
31617 1646592 1647104 ok
31618 1647104 1647616 ok
31619 1647616 1648128 ok
31620 1648128 1648640 ok
31621 1648640 1649152 ok
31622 1649152 1649664 ok
31623 1649664 1650176 ok
31624 1650176 1650688 ok
31625 1650688 1651200 ok
31626 1651200 1651712 ok
31627 1651712 1652224 ok
31628 1652224 1652736 ok
31629 1652736 1653248 ok
31630 1653248 1653760 ok
31631 1653760 1654272 ok
31632 1654272 1654784 ok
31633 1654784 1655296 ok
31634 1655296 1655808 ok
31635 1655808 1656320 ok
31636 1656320 1656832 ok
31637 1656832 1657344 ok
31638 1657344 1657856 ok
31639 1657856 1658368 ok
31640 1658368 1658880 ok
31641 1658880 1659392 ok
31642 1659392 1659904 ok
31643 1659904 1660416 ok
31644 1660416 1660928 ok
31645 1660928 1661440 ok
31646 1661440 1661952 ok
31647 1661952 1662464 ok
31648 1662464 1662976 ok
31649 1662976 1663488 ok
31650 1663488 1664000 ok
31651 1664000 1664512 ok
31652 1664512 1665024 ok
31653 1665024 1665536 ok
31654 1665536 1666048 ok
31655 1666048 1666560 ok
31656 1666560 1667072 ok
31657 1667072 1667584 ok
31658 1667584 1668096 ok
31659 1668096 1668608 ok
31660 1668608 1669120 ok
31661 1669120 1669632 ok
31662 1669632 1670144 ok
31663 1670144 1670656 ok
31664 1670656 1671168 ok
31665 1671168 1671680 ok
31666 1671680 1672192 ok
31667 1672192 1672704 ok
31668 1672704 1673216 ok
31669 1673216 1673728 ok
31670 1673728 1674240 ok
31671 1674240 1674752 ok
31672 1674752 1675264 ok
31673 1675264 1675776 ok
31674 1675776 1676288 ok
31675 1676288 1676800 ok
31676 1676800 1677312 ok
31677 1677312 1677824 ok
31678 1677824 1678336 ok
31679 1678336 1678848 ok
31680 1678848 1679360 ok
31681 1679360 1679872 ok
31682 1679872 1680384 ok
31683 1680384 1680896 ok
31684 1680896 1681408 ok
31685 1681408 1681920 ok
31686 1681920 1682432 ok
31687 1682432 1682944 ok
31688 1682944 1683456 ok
31689 1683456 1683968 ok
31690 1683968 1684480 ok
31691 1684480 1684992 ok
31692 1684992 1685504 ok
31693 1685504 1686016 ok
31694 1686016 1686528 ok
31695 1686528 1687040 ok
31696 1687040 1687552 ok
31697 1687552 1688064 ok
31698 1688064 1688576 ok
31699 1688576 1689088 ok
31700 1689088 1689600 ok
31701 1689600 1690112 ok
31702 1690112 1690624 ok
31703 1690624 1691136 ok
31704 1691136 1691648 ok
31705 1691648 1692160 ok
31706 1692160 1692672 ok
31707 1692672 1693184 ok
31708 1693184 1693696 ok
31709 1693696 1694208 ok
31710 1694208 1694720 ok
31711 1694720 1695232 ok
31712 1695232 1695744 ok
31713 1695744 1696256 ok
31714 1696256 1696768 ok
31715 1696768 1697280 ok
31716 1697280 1697792 ok
31717 1697792 1698304 ok
31718 1698304 1698816 ok
31719 1698816 1699328 ok
31720 1699328 1699840 ok
31721 1699840 1700352 ok
31722 1700352 1700864 ok
31723 1700864 1701376 ok
31724 1701376 1701888 ok
31725 1701888 1702400 ok
31726 1702400 1702912 ok
31727 1702912 1703424 ok
31728 1703424 1703936 ok
31729 1703936 1704448 ok
31730 1704448 1704960 ok
31731 1704960 1705472 ok
31732 1705472 1705984 ok
31733 1705984 1706496 ok
31734 1706496 1707008 ok
31735 1707008 1707520 ok
31736 1707520 1708032 ok
31737 1708032 1708544 ok
31738 1708544 1709056 ok
31739 1709056 1709568 ok
31740 1709568 1710080 ok
31741 1710080 1710592 ok
31742 1710592 1711104 ok
31743 1711104 1711616 ok
31744 1711616 1712128 ok
31745 1712128 1712640 ok
31746 1712640 1713152 ok
31747 1713152 1713664 ok
31748 1713664 1714176 ok
31749 1714176 1714688 ok
31750 1714688 1715200 ok
31751 1715200 1715712 ok
31752 1715712 1716224 ok
31753 1716224 1716736 ok
31754 1716736 1717248 ok
31755 1717248 1717760 ok
31756 1717760 1718272 ok
31757 1718272 1718784 ok
31758 1718784 1719296 ok
31759 1719296 1719808 ok
31760 1719808 1720320 ok
31761 1720320 1720832 ok
31762 1720832 1721344 ok
31763 1721344 1721856 ok
31764 1721856 1722368 ok
31765 1722368 1722880 ok
31766 1722880 1723392 ok
31767 1723392 1723904 ok
31768 1723904 1724416 ok
31769 1724416 1724928 ok
31770 1724928 1725440 ok
31771 1725440 1725952 ok
31772 1725952 1726464 ok
31773 1726464 1726976 ok
31774 1726976 1727488 ok
31775 1727488 1728000 ok
31776 1728000 1728512 ok
31777 1728512 1729024 ok
31778 1729024 1729536 ok
31779 1729536 1730048 ok
31780 1730048 1730560 ok
31781 1730560 1731072 ok
31782 1731072 1731584 ok
31783 1731584 1732096 ok
31784 1732096 1732608 ok
31785 1732608 1733120 ok
31786 1733120 1733632 ok
31787 1733632 1734144 ok
31788 1734144 1734656 ok
31789 1734656 1735168 ok
31790 1735168 1735680 ok
31791 1735680 1736192 ok
31792 1736192 1736704 ok
31793 1736704 1737216 ok
31794 1737216 1737728 ok
31795 1737728 1738240 ok
31796 1738240 1738752 ok
31797 1738752 1739264 ok
31798 1739264 1739776 ok
31799 1739776 1740288 ok
31800 1740288 1740800 ok
31801 1740800 1741312 ok
31802 1741312 1741824 ok
31803 1741824 1742336 ok
31804 1742336 1742848 ok
31805 1742848 1743360 ok
31806 1743360 1743872 ok
31807 1743872 1744384 ok
31808 1744384 1744896 ok
31809 1744896 1745408 ok
31810 1745408 1745920 ok
31811 1745920 1746432 ok
31812 1746432 1746944 ok
31813 1746944 1747456 ok
31814 1747456 1747968 ok
31815 1747968 1748480 ok
31816 1748480 1748992 ok
31817 1748992 1749504 ok
31818 1749504 1750016 ok
31819 1750016 1750528 ok
31820 1750528 1751040 ok
31821 1751040 1751552 ok
31822 1751552 1752064 ok
31823 1752064 1752576 ok
31824 1752576 1753088 ok
31825 1753088 1753600 ok
31826 1753600 1754112 ok
31827 1754112 1754624 ok
31828 1754624 1755136 ok
31829 1755136 1755648 ok
31830 1755648 1756160 ok
31831 1756160 1756672 ok
31832 1756672 1757184 ok
31833 1757184 1757696 ok
31834 1757696 1758208 ok
31835 1758208 1758720 ok
31836 1758720 1759232 ok
31837 1759232 1759744 ok
31838 1759744 1760256 ok
31839 1760256 1760768 ok
31840 1760768 1761280 ok
31841 1761280 1761792 ok
31842 1761792 1762304 ok
31843 1762304 1762816 ok
31844 1762816 1763328 ok
31845 1763328 1763840 ok
31846 1763840 1764352 ok
31847 1764352 1764864 ok
31848 1764864 1765376 ok
31849 1765376 1765888 ok
31850 1765888 1766400 ok
31851 1766400 1766912 ok
31852 1766912 1767424 ok
31853 1767424 1767936 ok
31854 1767936 1768448 ok
31855 1768448 1768960 ok
31856 1768960 1769472 ok
31857 1769472 1769984 ok
31858 1769984 1770496 ok
31859 1770496 1771008 ok
31860 1771008 1771520 ok
31861 1771520 1772032 ok
31862 1772032 1772544 ok
31863 1772544 1773056 ok
31864 1773056 1773568 ok
31865 1773568 1774080 ok
31866 1774080 1774592 ok
31867 1774592 1775104 ok
31868 1775104 1775616 ok
31869 1775616 1776128 ok
31870 1776128 1776640 ok
31871 1776640 1777152 ok
31872 1777152 1777664 ok
31873 1777664 1778176 ok
31874 1778176 1778688 ok
31875 1778688 1779200 ok
31876 1779200 1779712 ok
31877 1779712 1780224 ok
31878 1780224 1780736 ok
31879 1780736 1781248 ok
31880 1781248 1781760 ok
31881 1781760 1782272 ok
31882 1782272 1782784 ok
31883 1782784 1783296 ok
31884 1783296 1783808 ok
31885 1783808 1784320 ok
31886 1784320 1784832 ok
31887 1784832 1785344 ok
31888 1785344 1785856 ok
31889 1785856 1786368 ok
31890 1786368 1786880 ok
31891 1786880 1787392 ok
31892 1787392 1787904 ok
31893 1787904 1788416 ok
31894 1788416 1788928 ok
31895 1788928 1789440 ok
31896 1789440 1789952 ok
31897 1789952 1790464 ok
31898 1790464 1790976 ok
31899 1790976 1791488 ok
31900 1791488 1792000 ok
31901 1792000 1792512 ok
31902 1792512 1793024 ok
31903 1793024 1793536 ok
31904 1793536 1794048 ok
31905 1794048 1794560 ok
31906 1794560 1795072 ok
31907 1795072 1795584 ok
31908 1795584 1796096 ok
31909 1796096 1796608 ok
31910 1796608 1797120 ok
31911 1797120 1797632 ok
31912 1797632 1798144 ok
31913 1798144 1798656 ok
31914 1798656 1799168 ok
31915 1799168 1799680 ok
31916 1799680 1800192 ok
31917 1800192 1800704 ok
31918 1800704 1801216 ok
31919 1801216 1801728 ok
31920 1801728 1802240 ok
31921 1802240 1802752 ok
31922 1802752 1803264 ok
31923 1803264 1803776 ok
31924 1803776 1804288 ok
31925 1804288 1804800 ok
31926 1804800 1805312 ok
31927 1805312 1805824 ok
31928 1805824 1806336 ok
31929 1806336 1806848 ok
31930 1806848 1807360 ok
31931 1807360 1807872 ok
31932 1807872 1808384 ok
31933 1808384 1808896 ok
31934 1808896 1809408 ok
31935 1809408 1809920 ok
31936 1809920 1810432 ok
31937 1810432 1810944 ok
31938 1810944 1811456 ok
31939 1811456 1811968 ok
31940 1811968 1812480 ok
31941 1812480 1812992 ok
31942 1812992 1813504 ok
31943 1813504 1814016 ok
31944 1814016 1814528 ok
31945 1814528 1815040 ok
31946 1815040 1815552 ok
31947 1815552 1816064 ok
31948 1816064 1816576 ok
31949 1816576 1817088 ok
31950 1817088 1817600 ok
31951 1817600 1818112 ok
31952 1818112 1818624 ok
31953 1818624 1819136 ok
31954 1819136 1819648 ok
31955 1819648 1820160 ok
31956 1820160 1820672 ok
31957 1820672 1821184 ok
31958 1821184 1821696 ok
31959 1821696 1822208 ok
31960 1822208 1822720 ok
31961 1822720 1823232 ok
31962 1823232 1823744 ok
31963 1823744 1824256 ok
31964 1824256 1824768 ok
31965 1824768 1825280 ok
31966 1825280 1825792 ok
31967 1825792 1826304 ok
31968 1826304 1826816 ok
31969 1826816 1827328 ok
31970 1827328 1827840 ok
31971 1827840 1828352 ok
31972 1828352 1828864 ok
31973 1828864 1829376 ok
31974 1829376 1829888 ok
31975 1829888 1830400 ok
31976 1830400 1830912 ok
31977 1830912 1831424 ok
31978 1831424 1831936 ok
31979 1831936 1832448 ok
31980 1832448 1832960 ok
31981 1832960 1833472 ok
31982 1833472 1833984 ok
31983 1833984 1834496 ok
31984 1834496 1835008 ok
31985 1835008 1835520 ok
31986 1835520 1836032 ok
31987 1836032 1836544 ok
31988 1836544 1837056 ok
31989 1837056 1837568 ok
31990 1837568 1838080 ok
31991 1838080 1838592 ok
31992 1838592 1839104 ok
31993 1839104 1839616 ok
31994 1839616 1840128 ok
31995 1840128 1840640 ok
31996 1840640 1841152 ok
31997 1841152 1841664 ok
31998 1841664 1842176 ok
31999 1842176 1842688 ok
32000 1842688 1843200 ok
32001 1843200 1843712 ok
32002 1843712 1844224 ok
32003 1844224 1844736 ok
32004 1844736 1845248 ok
32005 1845248 1845760 ok
32006 1845760 1846272 ok
32007 1846272 1846784 ok
32008 1846784 1847296 ok
32009 1847296 1847808 ok
32010 1847808 1848320 ok
32011 1848320 1848832 ok
32012 1848832 1849344 ok
32013 1849344 1849856 ok
32014 1849856 1850368 ok
32015 1850368 1850880 ok
32016 1850880 1851392 ok
32017 1851392 1851904 ok
32018 1851904 1852416 ok
32019 1852416 1852928 ok
32020 1852928 1853440 ok
32021 1853440 1853952 ok
32022 1853952 1854464 ok
32023 1854464 1854976 ok
32024 1854976 1855488 ok
32025 1855488 1856000 ok
32026 1856000 1856512 ok
32027 1856512 1857024 ok
32028 1857024 1857536 ok
32029 1857536 1858048 ok
32030 1858048 1858560 ok
32031 1858560 1859072 ok
32032 1859072 1859584 ok
32033 1859584 1860096 ok
32034 1860096 1860608 ok
32035 1860608 1861120 ok
32036 1861120 1861632 ok
32037 1861632 1862144 ok
32038 1862144 1862656 ok
32039 1862656 1863168 ok
32040 1863168 1863680 ok
32041 1863680 1864192 ok
32042 1864192 1864704 ok
32043 1864704 1865216 ok
32044 1865216 1865728 ok
32045 1865728 1866240 ok
32046 1866240 1866752 ok
32047 1866752 1867264 ok
32048 1867264 1867776 ok
32049 1867776 1868288 ok
32050 1868288 1868800 ok
32051 1868800 1869312 ok
32052 1869312 1869824 ok
32053 1869824 1870336 ok
32054 1870336 1870848 ok
32055 1870848 1871360 ok
32056 1871360 1871872 ok
32057 1871872 1872384 ok
32058 1872384 1872896 ok
32059 1872896 1873408 ok
32060 1873408 1873920 ok
32061 1873920 1874432 ok
32062 1874432 1874944 ok
32063 1874944 1875456 ok
32064 1875456 1875968 ok
32065 1875968 1876480 ok
32066 1876480 1876992 ok
32067 1876992 1877504 ok
32068 1877504 1878016 ok
32069 1878016 1878528 ok
32070 1878528 1879040 ok
32071 1879040 1879552 ok
32072 1879552 1880064 ok
32073 1880064 1880576 ok
32074 1880576 1881088 ok
32075 1881088 1881600 ok
32076 1881600 1882112 ok
32077 1882112 1882624 ok
32078 1882624 1883136 ok
32079 1883136 1883648 ok
32080 1883648 1884160 ok
32081 1884160 1884672 ok
32082 1884672 1885184 ok
32083 1885184 1885696 ok
32084 1885696 1886208 ok
32085 1886208 1886720 ok
32086 1886720 1887232 ok
32087 1887232 1887744 ok
32088 1887744 1888256 ok
32089 1888256 1888768 ok
32090 1888768 1889280 ok
32091 1889280 1889792 ok
32092 1889792 1890304 ok
32093 1890304 1890816 ok
32094 1890816 1891328 ok
32095 1891328 1891840 ok
32096 1891840 1892352 ok
32097 1892352 1892864 ok
32098 1892864 1893376 ok
32099 1893376 1893888 ok
32100 1893888 1894400 ok
32101 1894400 1894912 ok
32102 1894912 1895424 ok
32103 1895424 1895936 ok
32104 1895936 1896448 ok
32105 1896448 1896960 ok
32106 1896960 1897472 ok
32107 1897472 1897984 ok
32108 1897984 1898496 ok
32109 1898496 1899008 ok
32110 1899008 1899520 ok
32111 1899520 1900032 ok
32112 1900032 1900544 ok
32113 1900544 1901056 ok
32114 1901056 1901568 ok
32115 1901568 1902080 ok
32116 1902080 1902592 ok
32117 1902592 1903104 ok
32118 1903104 1903616 ok
32119 1903616 1904128 ok
32120 1904128 1904640 ok
32121 1904640 1905152 ok
32122 1905152 1905664 ok
32123 1905664 1906176 ok
32124 1906176 1906688 ok
32125 1906688 1907200 ok
32126 1907200 1907712 ok
32127 1907712 1908224 ok
32128 1908224 1908736 ok
32129 1908736 1909248 ok
32130 1909248 1909760 ok
32131 1909760 1910272 ok
32132 1910272 1910784 ok
32133 1910784 1911296 ok
32134 1911296 1911808 ok
32135 1911808 1912320 ok
32136 1912320 1912832 ok
32137 1912832 1913344 ok
32138 1913344 1913856 ok
32139 1913856 1914368 ok
32140 1914368 1914880 ok
32141 1914880 1915392 ok
32142 1915392 1915904 ok
32143 1915904 1916416 ok
32144 1916416 1916928 ok
32145 1916928 1917440 ok
32146 1917440 1917952 ok
32147 1917952 1918464 ok
32148 1918464 1918976 ok
32149 1918976 1919488 ok
32150 1919488 1920000 ok
32151 1920000 1920512 ok
32152 1920512 1921024 ok
32153 1921024 1921536 ok
32154 1921536 1922048 ok
32155 1922048 1922560 ok
32156 1922560 1923072 ok
32157 1923072 1923584 ok
32158 1923584 1924096 ok
32159 1924096 1924608 ok
32160 1924608 1925120 ok
32161 1925120 1925632 ok
32162 1925632 1926144 ok
32163 1926144 1926656 ok
32164 1926656 1927168 ok
32165 1927168 1927680 ok
32166 1927680 1928192 ok
32167 1928192 1928704 ok
32168 1928704 1929216 ok
32169 1929216 1929728 ok
32170 1929728 1930240 ok
32171 1930240 1930752 ok
32172 1930752 1931264 ok
32173 1931264 1931776 ok
32174 1931776 1932288 ok
32175 1932288 1932800 ok
32176 1932800 1933312 ok
32177 1933312 1933824 ok
32178 1933824 1934336 ok
32179 1934336 1934848 ok
32180 1934848 1935360 ok
32181 1935360 1935872 ok
32182 1935872 1936384 ok
32183 1936384 1936896 ok
32184 1936896 1937408 ok
32185 1937408 1937920 ok
32186 1937920 1938432 ok
32187 1938432 1938944 ok
32188 1938944 1939456 ok
32189 1939456 1939968 ok
32190 1939968 1940480 ok
32191 1940480 1940992 ok
32192 1940992 1941504 ok
32193 1941504 1942016 ok
32194 1942016 1942528 ok
32195 1942528 1943040 ok
32196 1943040 1943552 ok
32197 1943552 1944064 ok
32198 1944064 1944576 ok
32199 1944576 1945088 ok
32200 1945088 1945600 ok
32201 1945600 1946112 ok
32202 1946112 1946624 ok
32203 1946624 1947136 ok
32204 1947136 1947648 ok
32205 1947648 1948160 ok
32206 1948160 1948672 ok
32207 1948672 1949184 ok
32208 1949184 1949696 ok
32209 1949696 1950208 ok
32210 1950208 1950720 ok
32211 1950720 1951232 ok
32212 1951232 1951744 ok
32213 1951744 1952256 ok
32214 1952256 1952768 ok
32215 1952768 1953280 ok
32216 1953280 1953792 ok
32217 1953792 1954304 ok
32218 1954304 1954816 ok
32219 1954816 1955328 ok
32220 1955328 1955840 ok
32221 1955840 1956352 ok
32222 1956352 1956864 ok
32223 1956864 1957376 ok
32224 1957376 1957888 ok
32225 1957888 1958400 ok
32226 1958400 1958912 ok
32227 1958912 1959424 ok
32228 1959424 1959936 ok
32229 1959936 1960448 ok
32230 1960448 1960960 ok
32231 1960960 1961472 ok
32232 1961472 1961984 ok
32233 1961984 1962496 ok
32234 1962496 1963008 ok
32235 1963008 1963520 ok
32236 1963520 1964032 ok
32237 1964032 1964544 ok
32238 1964544 1965056 ok
32239 1965056 1965568 ok
32240 1965568 1966080 ok
32241 Sep 22 23:21:47.660 DEBG IO Write 1023 has deps [JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32242 Sep 22 23:21:47.660 DEBG up_ds_listen was notified
32243 Sep 22 23:21:47.660 DEBG up_ds_listen process 1023
32244 Sep 22 23:21:47.660 DEBG [A] ack job 1023:24, : downstairs
32245 Sep 22 23:21:47.660 DEBG up_ds_listen checked 1 jobs, back to waiting
32246 test test::test_pantry_import_from_url_ovmf ... ok
322472023-09-22T23:21:47.705ZINFOcrucible-pantry (datafile): Scrub at offset 256/3840 sp:256
322482023-09-22T23:21:47.929ZINFOcrucible-pantry (datafile): Scrub at offset 512/3840 sp:512
32249 Sep 22 23:21:47.993 DEBG IO Write 1024 has deps [JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32250 Sep 22 23:21:47.993 DEBG up_ds_listen was notified
32251 Sep 22 23:21:47.993 DEBG up_ds_listen process 1024
32252 Sep 22 23:21:47.993 DEBG [A] ack job 1024:25, : downstairs
32253 Sep 22 23:21:47.993 DEBG up_ds_listen checked 1 jobs, back to waiting
32254 Sep 22 23:21:47.994 DEBG IO Flush 1025 has deps [JobId(1024), JobId(1023), JobId(1022)]
32255 Sep 22 23:21:48.060 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:2
32256 Sep 22 23:21:48.061 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:2
32257 Sep 22 23:21:48.061 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:2
322582023-09-22T23:21:48.151ZINFOcrucible-pantry (datafile): Scrub at offset 768/3840 sp:768
32259 Sep 22 23:21:48.324 DEBG IO Write 1026 has deps [JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32260 Sep 22 23:21:48.324 DEBG up_ds_listen was notified
32261 Sep 22 23:21:48.324 DEBG up_ds_listen process 1026
32262 Sep 22 23:21:48.325 DEBG [A] ack job 1026:27, : downstairs
32263 Sep 22 23:21:48.325 DEBG up_ds_listen checked 1 jobs, back to waiting
322642023-09-22T23:21:48.381ZINFOcrucible-pantry (datafile): Scrub at offset 1024/3840 sp:1024
32265 Sep 22 23:21:48.391 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected, looper: 0
32266 Sep 22 23:21:48.391 INFO [0] Proc runs for 127.0.0.1:63038 in state Disconnected
32267 Sep 22 23:21:48.391 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected, looper: 1
32268 Sep 22 23:21:48.391 INFO [1] Proc runs for 127.0.0.1:50898 in state Disconnected
32269 Sep 22 23:21:48.391 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected, looper: 2
32270 Sep 22 23:21:48.391 INFO [2] Proc runs for 127.0.0.1:62519 in state Disconnected
32271 Sep 22 23:21:48.391 INFO accepted connection from 127.0.0.1:51418, task: main
32272 Sep 22 23:21:48.391 INFO accepted connection from 127.0.0.1:53793, task: main
32273 Sep 22 23:21:48.391 INFO accepted connection from 127.0.0.1:59016, task: main
32274 Sep 22 23:21:48.392 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
32275 Sep 22 23:21:48.392 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } connected, version 4, task: proc
32276 Sep 22 23:21:48.392 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
32277 Sep 22 23:21:48.392 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } connected, version 4, task: proc
32278 Sep 22 23:21:48.392 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
32279 Sep 22 23:21:48.392 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 14046fa9-5850-443d-9708-a7d5d0130e56, gen: 1 } connected, version 4, task: proc
32280 Sep 22 23:21:48.392 INFO [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) Disconnected Disconnected Disconnected ds_transition to WaitActive
32281 Sep 22 23:21:48.392 INFO [0] Transition from Disconnected to WaitActive
32282 Sep 22 23:21:48.392 INFO [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitActive Disconnected Disconnected ds_transition to WaitActive
32283 Sep 22 23:21:48.392 INFO [1] Transition from Disconnected to WaitActive
32284 Sep 22 23:21:48.392 INFO [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (14046fa9-5850-443d-9708-a7d5d0130e56) WaitActive WaitActive Disconnected ds_transition to WaitActive
32285 Sep 22 23:21:48.392 INFO [2] Transition from Disconnected to WaitActive
322862023-09-22T23:21:48.417ZINFOcrucible-pantry (datafile): Checking if live repair is needed
322872023-09-22T23:21:48.417ZINFOcrucible-pantry (datafile): No Live Repair required at this time
322882023-09-22T23:21:48.421ZINFOcrucible-pantry (dropshot): request completed latency_us = 221 local_addr = 127.0.0.1:49824 method = GET remote_addr = 127.0.0.1:33149 req_id = fa0c3c86-5e5c-47eb-8776-4c9243b7e4df response_code = 200 uri = /crucible/pantry/0/job/9890f602-aa95-4bcc-8172-78f624d84944/is_finished
32289 Sep 22 23:21:48.610 DEBG Flush :1006 extent_limit None deps:[JobId(1005), JobId(1004), JobId(1003)] res:true f:2 g:2
32290 Sep 22 23:21:48.610 DEBG Flush :1006 extent_limit None deps:[JobId(1005), JobId(1004), JobId(1003)] res:true f:2 g:2
32291 Sep 22 23:21:48.610 DEBG Flush :1006 extent_limit None deps:[JobId(1005), JobId(1004), JobId(1003)] res:true f:2 g:2
322922023-09-22T23:21:48.611ZINFOcrucible-pantry (datafile): Scrub at offset 1280/3840 sp:1280
32293 Sep 22 23:21:48.720 DEBG IO Write 1027 has deps [JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32294 Sep 22 23:21:48.721 DEBG up_ds_listen was notified
32295 Sep 22 23:21:48.721 DEBG up_ds_listen process 1027
32296 Sep 22 23:21:48.721 DEBG [A] ack job 1027:28, : downstairs
32297 Sep 22 23:21:48.721 DEBG up_ds_listen checked 1 jobs, back to waiting
322982023-09-22T23:21:48.835ZINFOcrucible-pantry (datafile): Scrub at offset 1536/3840 sp:1536
32299 Sep 22 23:21:48.867 DEBG IO Flush 1028 has deps [JobId(1027), JobId(1026), JobId(1025)]
32300 Sep 22 23:21:48.898 DEBG Write :1002 deps:[JobId(1001)] res:true
32301 Sep 22 23:21:48.929 DEBG Write :1002 deps:[JobId(1001)] res:true
32302 Sep 22 23:21:48.931 WARN returning error on write!
32303 Sep 22 23:21:48.931 DEBG Write :1002 deps:[JobId(1001)] res:false
32304 Sep 22 23:21:48.932 WARN returning error on write!
32305 Sep 22 23:21:48.932 DEBG Write :1002 deps:[JobId(1001)] res:false
32306 Sep 22 23:21:48.933 WARN returning error on write!
32307 Sep 22 23:21:48.933 DEBG Write :1002 deps:[JobId(1001)] res:false
32308 Sep 22 23:21:48.963 DEBG Write :1002 deps:[JobId(1001)] res:true
323092023-09-22T23:21:49.058ZINFOcrucible-pantry (datafile): Scrub at offset 1792/3840 sp:1792
32310 Sep 22 23:21:49.078 DEBG Flush :1009 extent_limit None deps:[JobId(1008), JobId(1007)] res:true f:3 g:2
32311 Sep 22 23:21:49.078 DEBG Flush :1009 extent_limit None deps:[JobId(1008), JobId(1007)] res:true f:3 g:2
32312 Sep 22 23:21:49.078 DEBG Flush :1009 extent_limit None deps:[JobId(1008), JobId(1007)] res:true f:3 g:2
323132023-09-22T23:21:49.280ZINFOcrucible-pantry (datafile): Scrub at offset 2048/3840 sp:2048
32314 Sep 22 23:21:49.296 DEBG IO Write 1029 has deps [JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32315 Sep 22 23:21:49.296 DEBG up_ds_listen was notified
32316 Sep 22 23:21:49.296 DEBG up_ds_listen process 1029
32317 Sep 22 23:21:49.296 DEBG [A] ack job 1029:30, : downstairs
32318 Sep 22 23:21:49.296 DEBG up_ds_listen checked 1 jobs, back to waiting
323192023-09-22T23:21:49.478ZINFOcrucible-pantry (dropshot): request completed latency_us = 302 local_addr = 127.0.0.1:49824 method = GET remote_addr = 127.0.0.1:33149 req_id = d63d82ca-abbb-4210-9877-afca8aab02a6 response_code = 200 uri = /crucible/pantry/0/job/9890f602-aa95-4bcc-8172-78f624d84944/is_finished
323202023-09-22T23:21:49.509ZINFOcrucible-pantry (datafile): Scrub at offset 2304/3840 sp:2304
32321 Sep 22 23:21:49.579 DEBG Flush :1012 extent_limit None deps:[JobId(1011), JobId(1010)] res:true f:4 g:2
32322 Sep 22 23:21:49.579 DEBG Flush :1012 extent_limit None deps:[JobId(1011), JobId(1010)] res:true f:4 g:2
32323 Sep 22 23:21:49.579 DEBG Flush :1012 extent_limit None deps:[JobId(1011), JobId(1010)] res:true f:4 g:2
32324 Sep 22 23:21:49.627 DEBG IO Write 1030 has deps [JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32325 Sep 22 23:21:49.627 DEBG up_ds_listen was notified
32326 Sep 22 23:21:49.627 DEBG up_ds_listen process 1030
32327 Sep 22 23:21:49.627 DEBG [A] ack job 1030:31, : downstairs
32328 Sep 22 23:21:49.628 DEBG up_ds_listen checked 1 jobs, back to waiting
32329 Sep 22 23:21:49.628 DEBG IO Flush 1031 has deps [JobId(1030), JobId(1029), JobId(1028)]
323302023-09-22T23:21:49.734ZINFOcrucible-pantry (datafile): Scrub at offset 2560/3840 sp:2560
323312023-09-22T23:21:49.959ZINFOcrucible-pantry (datafile): Scrub at offset 2816/3840 sp:2816
32332 Sep 22 23:21:49.960 DEBG IO Write 1032 has deps [JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32333 Sep 22 23:21:49.960 DEBG up_ds_listen was notified
32334 Sep 22 23:21:49.960 DEBG up_ds_listen process 1032
32335 Sep 22 23:21:49.961 DEBG [A] ack job 1032:33, : downstairs
32336 Sep 22 23:21:49.961 DEBG up_ds_listen checked 1 jobs, back to waiting
32337 Sep 22 23:21:49.961 INFO [lossy] sleeping 1 second
32338 Sep 22 23:21:49.961 INFO [lossy] sleeping 1 second
32339 Sep 22 23:21:50.081 DEBG Flush :1015 extent_limit None deps:[JobId(1014), JobId(1013)] res:true f:5 g:2
32340 Sep 22 23:21:50.081 DEBG Flush :1015 extent_limit None deps:[JobId(1014), JobId(1013)] res:true f:5 g:2
32341 Sep 22 23:21:50.081 DEBG Flush :1015 extent_limit None deps:[JobId(1014), JobId(1013)] res:true f:5 g:2
323422023-09-22T23:21:50.184ZINFOcrucible-pantry (datafile): Scrub at offset 3072/3840 sp:3072
32343 Sep 22 23:21:50.296 DEBG IO Write 1033 has deps [JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32344 Sep 22 23:21:50.296 DEBG up_ds_listen was notified
32345 Sep 22 23:21:50.296 DEBG up_ds_listen process 1033
32346 Sep 22 23:21:50.296 DEBG [A] ack job 1033:34, : downstairs
32347 Sep 22 23:21:50.296 DEBG up_ds_listen checked 1 jobs, back to waiting
32348 Sep 22 23:21:50.297 DEBG IO Flush 1034 has deps [JobId(1033), JobId(1032), JobId(1031)]
323492023-09-22T23:21:50.408ZINFOcrucible-pantry (datafile): Scrub at offset 3328/3840 sp:3328
323502023-09-22T23:21:50.511ZINFOcrucible-pantry (dropshot): request completed latency_us = 226 local_addr = 127.0.0.1:49824 method = GET remote_addr = 127.0.0.1:33149 req_id = c5c4385f-82e6-4f2d-a92a-752103ae423c response_code = 200 uri = /crucible/pantry/0/job/9890f602-aa95-4bcc-8172-78f624d84944/is_finished
32351 Sep 22 23:21:50.630 DEBG IO Write 1035 has deps [JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32352 Sep 22 23:21:50.630 DEBG up_ds_listen was notified
32353 Sep 22 23:21:50.630 DEBG up_ds_listen process 1035
32354 Sep 22 23:21:50.630 DEBG [A] ack job 1035:36, : downstairs
32355 Sep 22 23:21:50.630 DEBG up_ds_listen checked 1 jobs, back to waiting
32356 Sep 22 23:21:50.635 DEBG Flush :1019 extent_limit None deps:[JobId(1018), JobId(1017), JobId(1016)] res:true f:6 g:2
32357 Sep 22 23:21:50.635 DEBG Flush :1019 extent_limit None deps:[JobId(1018), JobId(1017), JobId(1016)] res:true f:6 g:2
32358 Sep 22 23:21:50.635 DEBG Flush :1019 extent_limit None deps:[JobId(1018), JobId(1017), JobId(1016)] res:true f:6 g:2
323592023-09-22T23:21:50.636ZINFOcrucible-pantry (datafile): Scrub at offset 3584/3840 sp:3584
323602023-09-22T23:21:50.861ZINFOcrucible-pantry (datafile): Scrub at offset 3840/3840 sp:3840
323612023-09-22T23:21:50.863ZINFOcrucible-pantry (datafile): Scrub 3dd77b9b-1720-4c07-b5d9-e4b24cceb521 done in 3 seconds. Retries:0 scrub_size:131072 size:3840 pause_milli:0
32362 Sep 22 23:21:50.864 DEBG Flush :1021 extent_limit None deps:[JobId(1020)] res:true f:7 g:2
32363 Sep 22 23:21:50.865 DEBG Flush :1021 extent_limit None deps:[JobId(1020)] res:true f:7 g:2
32364 Sep 22 23:21:50.865 DEBG Flush :1021 extent_limit None deps:[JobId(1020)] res:true f:7 g:2
32365 Sep 22 23:21:50.965 DEBG IO Write 1036 has deps [JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32366 Sep 22 23:21:50.965 DEBG up_ds_listen was notified
32367 Sep 22 23:21:50.965 DEBG up_ds_listen process 1036
32368 Sep 22 23:21:50.965 DEBG [A] ack job 1036:37, : downstairs
32369 Sep 22 23:21:50.965 DEBG up_ds_listen checked 1 jobs, back to waiting
32370 Sep 22 23:21:50.966 DEBG IO Flush 1037 has deps [JobId(1036), JobId(1035), JobId(1034)]
32371 Sep 22 23:21:51.302 DEBG IO Write 1038 has deps [JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32372 Sep 22 23:21:51.303 DEBG up_ds_listen was notified
32373 Sep 22 23:21:51.303 DEBG up_ds_listen process 1038
32374 Sep 22 23:21:51.303 DEBG [A] ack job 1038:39, : downstairs
32375 Sep 22 23:21:51.303 DEBG up_ds_listen checked 1 jobs, back to waiting
323762023-09-22T23:21:51.513ZINFOcrucible-pantry (dropshot): request completed latency_us = 261 local_addr = 127.0.0.1:49824 method = GET remote_addr = 127.0.0.1:33149 req_id = 63665cfe-9e11-49bd-a25a-bf021ba54baa response_code = 200 uri = /crucible/pantry/0/job/9890f602-aa95-4bcc-8172-78f624d84944/is_finished
323772023-09-22T23:21:51.513ZINFOcrucible-pantry (dropshot): request completed latency_us = 176 local_addr = 127.0.0.1:49824 method = GET remote_addr = 127.0.0.1:33149 req_id = fe74781b-b47a-41bc-a63b-2ab540a52781 response_code = 200 uri = /crucible/pantry/0/job/9890f602-aa95-4bcc-8172-78f624d84944/ok
323782023-09-22T23:21:51.514ZINFOcrucible-pantry (datafile): detach removing entry for volume 3dd77b9b-1720-4c07-b5d9-e4b24cceb521
323792023-09-22T23:21:51.514ZINFOcrucible-pantry (datafile): detaching volume 3dd77b9b-1720-4c07-b5d9-e4b24cceb521
32380 Sep 22 23:21:51.515 DEBG Flush :1022 extent_limit None deps:[] res:true f:8 g:2
32381 Sep 22 23:21:51.515 DEBG Flush :1022 extent_limit None deps:[] res:true f:8 g:2
32382 Sep 22 23:21:51.515 DEBG Flush :1022 extent_limit None deps:[] res:true f:8 g:2
323832023-09-22T23:21:51.515ZINFOcrucible-pantry (datafile): Request to deactivate this guest
323842023-09-22T23:21:51.515ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set deactivating.
323852023-09-22T23:21:51.517ZINFOcrucible-pantry (dropshot): request completed latency_us = 3458 local_addr = 127.0.0.1:49824 method = DELETE remote_addr = 127.0.0.1:33149 req_id = eef1dd5b-b8e1-4310-a7a4-faedfe430dc6 response_code = 204 uri = /crucible/pantry/0/volume/3dd77b9b-1720-4c07-b5d9-e4b24cceb521
323862023-09-22T23:21:51.518ZINFOcrucible-pantry: Upstairs starts
323872023-09-22T23:21:51.518ZINFOcrucible-pantry: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
323882023-09-22T23:21:51.518ZINFOcrucible-pantry: Upstairs <-> Downstairs Message Version: 4
323892023-09-22T23:21:51.518ZINFOcrucible-pantry: Crucible stats registered with UUID: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b
323902023-09-22T23:21:51.518ZINFOcrucible-pantry: Crucible 71f26a6c-b7c6-4f58-901b-ae4e36b1710b has session id: 3a34d657-eced-4b79-a336-c052da87a51b
323912023-09-22T23:21:51.518ZINFOcrucible-pantry: [0] connecting to 127.0.0.1:63038 looper = 0
323922023-09-22T23:21:51.518ZINFOcrucible-pantry: [1] connecting to 127.0.0.1:50898 looper = 1
323932023-09-22T23:21:51.518ZINFOcrucible-pantry: [2] connecting to 127.0.0.1:62519 looper = 2
323942023-09-22T23:21:51.518ZINFOcrucible-pantry: up_listen starts task = up_listen
323952023-09-22T23:21:51.518ZINFOcrucible-pantry: Wait for all three downstairs to come online
323962023-09-22T23:21:51.518ZINFOcrucible-pantry: Flush timeout: 0.5
323972023-09-22T23:21:51.518ZINFOcrucible-pantry: [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected looper = 0
323982023-09-22T23:21:51.519ZINFOcrucible-pantry: [0] Proc runs for 127.0.0.1:63038 in state New
323992023-09-22T23:21:51.519ZINFOcrucible-pantry: [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected looper = 1
324002023-09-22T23:21:51.519ZINFOcrucible-pantry: [1] Proc runs for 127.0.0.1:50898 in state New
324012023-09-22T23:21:51.519ZINFOcrucible-pantry: [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b looper connected looper = 2
324022023-09-22T23:21:51.519ZINFOcrucible-pantry: [2] Proc runs for 127.0.0.1:62519 in state New
32403 Sep 22 23:21:51.519 INFO accepted connection from 127.0.0.1:57723, task: main
32404 Sep 22 23:21:51.519 INFO accepted connection from 127.0.0.1:56683, task: main
32405 Sep 22 23:21:51.519 INFO accepted connection from 127.0.0.1:46008, task: main
32406 Sep 22 23:21:51.519 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
32407 Sep 22 23:21:51.519 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } connected, version 4, task: proc
32408 Sep 22 23:21:51.519 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
32409 Sep 22 23:21:51.519 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } connected, version 4, task: proc
32410 Sep 22 23:21:51.519 INFO Connection request from 71f26a6c-b7c6-4f58-901b-ae4e36b1710b with version 4, task: proc
32411 Sep 22 23:21:51.519 INFO upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } connected, version 4, task: proc
324122023-09-22T23:21:51.519ZINFOcrucible-pantry: [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (b703b4b5-d321-4299-acb9-588c806f0017) New New New ds_transition to WaitActive
324132023-09-22T23:21:51.519ZINFOcrucible-pantry: [0] Transition from New to WaitActive
324142023-09-22T23:21:51.519ZINFOcrucible-pantry: [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (b703b4b5-d321-4299-acb9-588c806f0017) WaitActive New New ds_transition to WaitActive
324152023-09-22T23:21:51.519ZINFOcrucible-pantry: [1] Transition from New to WaitActive
324162023-09-22T23:21:51.520ZINFOcrucible-pantry: [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (b703b4b5-d321-4299-acb9-588c806f0017) WaitActive WaitActive New ds_transition to WaitActive
324172023-09-22T23:21:51.520ZINFOcrucible-pantry: [2] Transition from New to WaitActive
32418 The guest has requested activation
324192023-09-22T23:21:51.520ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b active request set
324202023-09-22T23:21:51.520ZINFOcrucible-pantry: [0] received activate with gen 3
324212023-09-22T23:21:51.520ZINFOcrucible-pantry: [0] client got ds_active_rx, promote! session b703b4b5-d321-4299-acb9-588c806f0017
324222023-09-22T23:21:51.520ZINFOcrucible-pantry: [1] received activate with gen 3
324232023-09-22T23:21:51.520ZINFOcrucible-pantry: [1] client got ds_active_rx, promote! session b703b4b5-d321-4299-acb9-588c806f0017
324242023-09-22T23:21:51.520ZINFOcrucible-pantry: [2] received activate with gen 3
324252023-09-22T23:21:51.520ZINFOcrucible-pantry: [2] client got ds_active_rx, promote! session b703b4b5-d321-4299-acb9-588c806f0017
32426 Sep 22 23:21:51.520 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 }
32427 Sep 22 23:21:51.520 WARN Signaling to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } thread that UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } is being promoted (read-write)
32428 Sep 22 23:21:51.520 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 }
32429 Sep 22 23:21:51.520 WARN Signaling to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } thread that UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } is being promoted (read-write)
32430 Sep 22 23:21:51.520 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 }
32431 Sep 22 23:21:51.520 WARN Signaling to UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 } thread that UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } is being promoted (read-write)
32432 Sep 22 23:21:51.520 WARN Another upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 }, task: main
32433 Sep 22 23:21:51.521 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } is now active (read-write)
32434 Sep 22 23:21:51.521 WARN Another upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 }, task: main
32435 Sep 22 23:21:51.521 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } is now active (read-write)
32436 Sep 22 23:21:51.521 WARN Another upstairs UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: 3d6550fa-8c01-4792-b110-6653df927fd3, gen: 2 }, task: main
32437 Sep 22 23:21:51.521 INFO UpstairsConnection { upstairs_id: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b, session_id: b703b4b5-d321-4299-acb9-588c806f0017, gen: 3 } is now active (read-write)
32438 Sep 22 23:21:51.521 INFO connection (127.0.0.1:50361): all done
32439 Sep 22 23:21:51.521 INFO connection (127.0.0.1:56375): all done
32440 Sep 22 23:21:51.521 INFO connection (127.0.0.1:64643): all done
324412023-09-22T23:21:51.521ZERROcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) cmd_loop saw YouAreNoLongerActive 71f26a6c-b7c6-4f58-901b-ae4e36b1710b b703b4b5-d321-4299-acb9-588c806f0017 3
324422023-09-22T23:21:51.521ZINFOcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) Active Active Active ds_transition to Disabled
324432023-09-22T23:21:51.521ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
324442023-09-22T23:21:51.521ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set inactive, session 3d6550fa-8c01-4792-b110-6653df927fd3
324452023-09-22T23:21:51.521ZERROcrucible-pantry (datafile): 127.0.0.1:63038: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2 looper = 0
324462023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Gone missing, transition from Disabled to Disconnected
324472023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b connection to 127.0.0.1:63038 closed looper = 0
324482023-09-22T23:21:51.522ZERROcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) cmd_loop saw YouAreNoLongerActive 71f26a6c-b7c6-4f58-901b-ae4e36b1710b b703b4b5-d321-4299-acb9-588c806f0017 3
324492023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) Disconnected Active Active ds_transition to Disabled
324502023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
324512023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set inactive, session 3d6550fa-8c01-4792-b110-6653df927fd3
324522023-09-22T23:21:51.522ZERROcrucible-pantry (datafile): 127.0.0.1:50898: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2 looper = 1
324532023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Gone missing, transition from Disabled to Disconnected
324542023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b connection to 127.0.0.1:50898 closed looper = 1
324552023-09-22T23:21:51.522ZERROcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) cmd_loop saw YouAreNoLongerActive 71f26a6c-b7c6-4f58-901b-ae4e36b1710b b703b4b5-d321-4299-acb9-588c806f0017 3
324562023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (3d6550fa-8c01-4792-b110-6653df927fd3) Disconnected Disconnected Active ds_transition to Disabled
324572023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
324582023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b set inactive, session 3d6550fa-8c01-4792-b110-6653df927fd3
324592023-09-22T23:21:51.522ZERROcrucible-pantry (datafile): 127.0.0.1:62519: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2 looper = 2
324602023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Gone missing, transition from Disabled to Disconnected
324612023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b connection to 127.0.0.1:62519 closed looper = 2
324622023-09-22T23:21:51.522ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
324632023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:63038 task reports connection:false
324642023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Disconnected Disconnected Disconnected
324652023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:63038 task reports offline
324662023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:50898 task reports connection:false
324672023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Disconnected Disconnected Disconnected
324682023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:50898 task reports offline
324692023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:62519 task reports connection:false
324702023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Disconnected Disconnected Disconnected
324712023-09-22T23:21:51.522ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:62519 task reports offline
324722023-09-22T23:21:51.522ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
324732023-09-22T23:21:51.522ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
324742023-09-22T23:21:51.522ZINFOcrucible-pantry: [0] downstairs client at 127.0.0.1:63038 has UUID 0c4b6300-aa56-4191-90c9-1d56da8be939
324752023-09-22T23:21:51.522ZINFOcrucible-pantry: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 0c4b6300-aa56-4191-90c9-1d56da8be939, encrypted: true, database_read_version: 1, database_write_version: 1 }
324762023-09-22T23:21:51.522ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
324772023-09-22T23:21:51.522ZINFOcrucible-pantry: [1] downstairs client at 127.0.0.1:50898 has UUID 77625cc5-cf57-4c89-b7d2-c29dca5216c1
324782023-09-22T23:21:51.522ZINFOcrucible-pantry: [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 77625cc5-cf57-4c89-b7d2-c29dca5216c1, encrypted: true, database_read_version: 1, database_write_version: 1 }
324792023-09-22T23:21:51.522ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
324802023-09-22T23:21:51.522ZINFOcrucible-pantry: [2] downstairs client at 127.0.0.1:62519 has UUID d89769b3-2748-4a20-ad6b-6b02a9292676
324812023-09-22T23:21:51.522ZINFOcrucible-pantry: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: d89769b3-2748-4a20-ad6b-6b02a9292676, encrypted: true, database_read_version: 1, database_write_version: 1 }
324822023-09-22T23:21:51.522ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitActive WaitActive WaitActive
32483 Sep 22 23:21:51.530 INFO Current flush_numbers [0..12]: [1, 2, 3, 4, 5, 6, 6, 7, 0, 0, 0, 0]
32484 Sep 22 23:21:51.531 INFO Current flush_numbers [0..12]: [1, 2, 3, 4, 5, 6, 6, 7, 0, 0, 0, 0]
32485 Sep 22 23:21:51.532 INFO Current flush_numbers [0..12]: [1, 2, 3, 4, 5, 6, 6, 7, 0, 0, 0, 0]
32486 Sep 22 23:21:51.544 INFO Downstairs has completed Negotiation, task: proc
32487 Sep 22 23:21:51.545 INFO Downstairs has completed Negotiation, task: proc
32488 Sep 22 23:21:51.546 INFO Downstairs has completed Negotiation, task: proc
324892023-09-22T23:21:51.546ZINFOcrucible-pantry: [0] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (b703b4b5-d321-4299-acb9-588c806f0017) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
324902023-09-22T23:21:51.546ZINFOcrucible-pantry: [0] Transition from WaitActive to WaitQuorum
324912023-09-22T23:21:51.546ZWARNcrucible-pantry: [0] new RM replaced this: None
324922023-09-22T23:21:51.546ZINFOcrucible-pantry: [0] Starts reconcile loop
324932023-09-22T23:21:51.546ZINFOcrucible-pantry: [1] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (b703b4b5-d321-4299-acb9-588c806f0017) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
324942023-09-22T23:21:51.546ZINFOcrucible-pantry: [1] Transition from WaitActive to WaitQuorum
324952023-09-22T23:21:51.546ZWARNcrucible-pantry: [1] new RM replaced this: None
324962023-09-22T23:21:51.546ZINFOcrucible-pantry: [1] Starts reconcile loop
324972023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] 71f26a6c-b7c6-4f58-901b-ae4e36b1710b (b703b4b5-d321-4299-acb9-588c806f0017) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
324982023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] Transition from WaitActive to WaitQuorum
324992023-09-22T23:21:51.547ZWARNcrucible-pantry: [2] new RM replaced this: None
325002023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] Starts reconcile loop
325012023-09-22T23:21:51.547ZINFOcrucible-pantry: [0] 127.0.0.1:63038 task reports connection:true
325022023-09-22T23:21:51.547ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b WaitQuorum WaitQuorum WaitQuorum
325032023-09-22T23:21:51.547ZINFOcrucible-pantry: [0]R flush_numbers[0..12]: [1, 2, 3, 4, 5, 6, 6, 7, 0, 0, 0, 0]
325042023-09-22T23:21:51.547ZINFOcrucible-pantry: [0]R generation[0..12]: [2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0]
325052023-09-22T23:21:51.547ZINFOcrucible-pantry: [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
325062023-09-22T23:21:51.547ZINFOcrucible-pantry: [1]R flush_numbers[0..12]: [1, 2, 3, 4, 5, 6, 6, 7, 0, 0, 0, 0]
325072023-09-22T23:21:51.547ZINFOcrucible-pantry: [1]R generation[0..12]: [2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0]
325082023-09-22T23:21:51.547ZINFOcrucible-pantry: [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
325092023-09-22T23:21:51.547ZINFOcrucible-pantry: [2]R flush_numbers[0..12]: [1, 2, 3, 4, 5, 6, 6, 7, 0, 0, 0, 0]
325102023-09-22T23:21:51.547ZINFOcrucible-pantry: [2]R generation[0..12]: [2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0]
325112023-09-22T23:21:51.547ZINFOcrucible-pantry: [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
325122023-09-22T23:21:51.547ZINFOcrucible-pantry: Max found gen is 3
325132023-09-22T23:21:51.547ZINFOcrucible-pantry: Generation requested: 3 >= found:3
325142023-09-22T23:21:51.547ZINFOcrucible-pantry: Next flush: 8
32515 The guest has finished waiting for activation
325162023-09-22T23:21:51.547ZINFOcrucible-pantry: All extents match
325172023-09-22T23:21:51.547ZINFOcrucible-pantry: No downstairs repair required
325182023-09-22T23:21:51.547ZINFOcrucible-pantry: No initial repair work was required
325192023-09-22T23:21:51.547ZINFOcrucible-pantry: Set Downstairs and Upstairs active
325202023-09-22T23:21:51.547ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b is now active with session: b703b4b5-d321-4299-acb9-588c806f0017
325212023-09-22T23:21:51.547ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Set Active after no repair
325222023-09-22T23:21:51.547ZINFOcrucible-pantry: Notify all downstairs, region set compare is done.
325232023-09-22T23:21:51.547ZINFOcrucible-pantry: Set check for repair
325242023-09-22T23:21:51.547ZINFOcrucible-pantry: [1] 127.0.0.1:50898 task reports connection:true
325252023-09-22T23:21:51.547ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Active Active Active
325262023-09-22T23:21:51.547ZINFOcrucible-pantry: Set check for repair
325272023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] 127.0.0.1:62519 task reports connection:true
325282023-09-22T23:21:51.547ZINFOcrucible-pantry: 71f26a6c-b7c6-4f58-901b-ae4e36b1710b Active Active Active
325292023-09-22T23:21:51.547ZINFOcrucible-pantry: Set check for repair
325302023-09-22T23:21:51.547ZINFOcrucible-pantry: [0] received reconcile message
325312023-09-22T23:21:51.547ZINFOcrucible-pantry: [0] All repairs completed, exit
325322023-09-22T23:21:51.547ZINFOcrucible-pantry: [0] Starts cmd_loop
325332023-09-22T23:21:51.547ZINFOcrucible-pantry: [1] received reconcile message
325342023-09-22T23:21:51.547ZINFOcrucible-pantry: [1] All repairs completed, exit
325352023-09-22T23:21:51.547ZINFOcrucible-pantry: [1] Starts cmd_loop
325362023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] received reconcile message
325372023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] All repairs completed, exit
325382023-09-22T23:21:51.547ZINFOcrucible-pantry: [2] Starts cmd_loop
32539 Sep 22 23:21:51.589 DEBG Read :1000 deps:[] res:true
32540 Sep 22 23:21:51.641 DEBG IO Write 1039 has deps [JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32541 Sep 22 23:21:51.641 DEBG up_ds_listen was notified
32542 Sep 22 23:21:51.641 DEBG up_ds_listen process 1039
32543 Sep 22 23:21:51.642 DEBG [A] ack job 1039:40, : downstairs
32544 Sep 22 23:21:51.642 DEBG up_ds_listen checked 1 jobs, back to waiting
32545 Sep 22 23:21:51.643 DEBG IO Flush 1040 has deps [JobId(1039), JobId(1038), JobId(1037)]
32546 Sep 22 23:21:51.647 DEBG Read :1000 deps:[] res:true
32547 Sep 22 23:21:51.704 DEBG Read :1000 deps:[] res:true
32548 Sep 22 23:21:52.038 DEBG IO Write 1041 has deps [JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32549 Sep 22 23:21:52.038 DEBG up_ds_listen was notified
32550 Sep 22 23:21:52.038 DEBG up_ds_listen process 1041
32551 Sep 22 23:21:52.038 DEBG [A] ack job 1041:42, : downstairs
32552 Sep 22 23:21:52.038 DEBG up_ds_listen checked 1 jobs, back to waiting
32553 Sep 22 23:21:52.198 INFO [lossy] sleeping 1 second
32554 Sep 22 23:21:52.198 INFO [lossy] sleeping 1 second
32555 Sep 22 23:21:52.200 WARN returning error on write!
32556 Sep 22 23:21:52.200 DEBG Write :1003 deps:[JobId(1001)] res:false
32557 Sep 22 23:21:52.201 INFO [lossy] skipping 1003
32558 Sep 22 23:21:52.201 INFO [lossy] skipping 1003
32559 Sep 22 23:21:52.230 DEBG Write :1003 deps:[JobId(1001)] res:true
32560 Sep 22 23:21:52.231 INFO [lossy] sleeping 1 second
32561 Sep 22 23:21:52.565 DEBG IO Write 1042 has deps [JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32562 Sep 22 23:21:52.565 DEBG IO Flush 1043 has deps [JobId(1042), JobId(1041), JobId(1040)]
32563 Sep 22 23:21:52.565 DEBG up_ds_listen was notified
32564 Sep 22 23:21:52.565 DEBG up_ds_listen process 1042
32565 Sep 22 23:21:52.565 DEBG [A] ack job 1042:43, : downstairs
32566 Sep 22 23:21:52.565 DEBG up_ds_listen checked 1 jobs, back to waiting
32567 Sep 22 23:21:52.905 DEBG IO Write 1044 has deps [JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32568 Sep 22 23:21:52.905 DEBG up_ds_listen was notified
32569 Sep 22 23:21:52.905 DEBG up_ds_listen process 1044
32570 Sep 22 23:21:52.905 DEBG [A] ack job 1044:45, : downstairs
32571 Sep 22 23:21:52.905 DEBG up_ds_listen checked 1 jobs, back to waiting
32572 Sep 22 23:21:53.246 DEBG IO Write 1045 has deps [JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32573 Sep 22 23:21:53.246 DEBG up_ds_listen was notified
32574 Sep 22 23:21:53.246 DEBG up_ds_listen process 1045
32575 Sep 22 23:21:53.247 DEBG [A] ack job 1045:46, : downstairs
32576 Sep 22 23:21:53.247 DEBG up_ds_listen checked 1 jobs, back to waiting
32577 Sep 22 23:21:53.247 DEBG IO Flush 1046 has deps [JobId(1045), JobId(1044), JobId(1043)]
32578 Sep 22 23:21:53.247 INFO [lossy] skipping 1003
32579 Sep 22 23:21:53.247 INFO [lossy] skipping 1003
32580 Sep 22 23:21:53.279 DEBG Write :1003 deps:[JobId(1001)] res:true
32581 Sep 22 23:21:53.280 INFO [lossy] sleeping 1 second
32582 Sep 22 23:21:53.280 INFO [lossy] skipping 1003
32583 Sep 22 23:21:53.280 INFO [lossy] skipping 1004
32584 Sep 22 23:21:53.280 WARN returning error on write!
32585 Sep 22 23:21:53.280 DEBG Write :1003 deps:[JobId(1001)] res:false
32586 Sep 22 23:21:53.281 INFO [lossy] skipping 1004
32587 Sep 22 23:21:53.310 DEBG Write :1003 deps:[JobId(1001)] res:true
32588 Sep 22 23:21:53.324 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001)] res:true f:2 g:1
32589 Sep 22 23:21:53.324 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001)] res:true f:2 g:1
32590 Sep 22 23:21:53.653 DEBG IO Write 1047 has deps [JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32591 Sep 22 23:21:53.653 DEBG up_ds_listen was notified
32592 Sep 22 23:21:53.653 DEBG up_ds_listen process 1047
32593 Sep 22 23:21:53.653 DEBG [A] ack job 1047:48, : downstairs
32594 Sep 22 23:21:53.653 DEBG up_ds_listen checked 1 jobs, back to waiting
32595 Sep 22 23:21:53.983 DEBG IO Write 1048 has deps [JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32596 Sep 22 23:21:53.983 DEBG up_ds_listen was notified
32597 Sep 22 23:21:53.983 DEBG up_ds_listen process 1048
32598 Sep 22 23:21:53.983 DEBG [A] ack job 1048:49, : downstairs
32599 Sep 22 23:21:53.983 DEBG up_ds_listen checked 1 jobs, back to waiting
32600 Sep 22 23:21:53.984 DEBG IO Flush 1049 has deps [JobId(1048), JobId(1047), JobId(1046)]
32601 test test::test_pantry_scrub ... ok
32602 Sep 22 23:21:54.317 DEBG IO Write 1050 has deps [JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32603 Sep 22 23:21:54.317 DEBG up_ds_listen was notified
32604 Sep 22 23:21:54.317 DEBG up_ds_listen process 1050
32605 Sep 22 23:21:54.317 DEBG [A] ack job 1050:51, : downstairs
32606 Sep 22 23:21:54.317 DEBG up_ds_listen checked 1 jobs, back to waiting
32607 Sep 22 23:21:54.324 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001)] res:true f:2 g:1
32608 Sep 22 23:21:54.654 DEBG IO Write 1051 has deps [JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32609 Sep 22 23:21:54.654 DEBG up_ds_listen was notified
32610 Sep 22 23:21:54.654 DEBG up_ds_listen process 1051
32611 Sep 22 23:21:54.654 DEBG [A] ack job 1051:52, : downstairs
32612 Sep 22 23:21:54.654 DEBG up_ds_listen checked 1 jobs, back to waiting
32613 Sep 22 23:21:54.655 DEBG IO Flush 1052 has deps [JobId(1051), JobId(1050), JobId(1049)]
32614 Sep 22 23:21:54.986 DEBG IO Write 1053 has deps [JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32615 Sep 22 23:21:54.986 DEBG up_ds_listen was notified
32616 Sep 22 23:21:54.986 DEBG up_ds_listen process 1053
32617 Sep 22 23:21:54.986 DEBG [A] ack job 1053:54, : downstairs
32618 Sep 22 23:21:54.986 DEBG up_ds_listen checked 1 jobs, back to waiting
32619 Sep 22 23:21:55.320 DEBG IO Write 1054 has deps [JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32620 Sep 22 23:21:55.320 DEBG up_ds_listen was notified
32621 Sep 22 23:21:55.320 DEBG up_ds_listen process 1054
32622 Sep 22 23:21:55.321 DEBG [A] ack job 1054:55, : downstairs
32623 Sep 22 23:21:55.321 DEBG up_ds_listen checked 1 jobs, back to waiting
32624 Sep 22 23:21:55.321 DEBG IO Flush 1055 has deps [JobId(1054), JobId(1053), JobId(1052)]
32625 Sep 22 23:21:55.717 DEBG IO Write 1056 has deps [JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32626 Sep 22 23:21:55.717 DEBG up_ds_listen was notified
32627 Sep 22 23:21:55.717 DEBG up_ds_listen process 1056
32628 Sep 22 23:21:55.717 DEBG [A] ack job 1056:57, : downstairs
32629 Sep 22 23:21:55.717 DEBG up_ds_listen checked 1 jobs, back to waiting
32630 Sep 22 23:21:55.866 INFO [lossy] sleeping 1 second
32631 Sep 22 23:21:55.866 INFO [lossy] skipping 1005
32632 Sep 22 23:21:55.866 INFO [lossy] skipping 1005
32633 Sep 22 23:21:55.868 WARN returning error on write!
32634 Sep 22 23:21:55.868 DEBG Write :1005 deps:[JobId(1004), JobId(1001)] res:false
32635 Sep 22 23:21:55.869 WARN returning error on write!
32636 Sep 22 23:21:55.869 DEBG Write :1005 deps:[JobId(1004), JobId(1001)] res:false
32637 Sep 22 23:21:55.870 INFO [lossy] skipping 1005
32638 Sep 22 23:21:55.899 DEBG Write :1005 deps:[JobId(1004), JobId(1001)] res:true
32639 Sep 22 23:21:55.929 DEBG Write :1005 deps:[JobId(1004), JobId(1001)] res:true
32640 Sep 22 23:21:56.259 DEBG IO Write 1057 has deps [JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32641 Sep 22 23:21:56.260 DEBG IO Flush 1058 has deps [JobId(1057), JobId(1056), JobId(1055)]
32642 Sep 22 23:21:56.260 DEBG up_ds_listen was notified
32643 Sep 22 23:21:56.260 DEBG up_ds_listen process 1057
32644 Sep 22 23:21:56.260 DEBG [A] ack job 1057:58, : downstairs
32645 Sep 22 23:21:56.260 DEBG up_ds_listen checked 1 jobs, back to waiting
32646 Sep 22 23:21:56.593 DEBG IO Write 1059 has deps [JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32647 Sep 22 23:21:56.593 DEBG up_ds_listen was notified
32648 Sep 22 23:21:56.593 DEBG up_ds_listen process 1059
32649 Sep 22 23:21:56.593 DEBG [A] ack job 1059:60, : downstairs
32650 Sep 22 23:21:56.594 DEBG up_ds_listen checked 1 jobs, back to waiting
32651 Sep 22 23:21:56.932 DEBG IO Write 1060 has deps [JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32652 Sep 22 23:21:56.932 DEBG up_ds_listen was notified
32653 Sep 22 23:21:56.932 DEBG up_ds_listen process 1060
32654 Sep 22 23:21:56.932 DEBG [A] ack job 1060:61, : downstairs
32655 Sep 22 23:21:56.932 DEBG up_ds_listen checked 1 jobs, back to waiting
32656 Sep 22 23:21:56.933 DEBG IO Flush 1061 has deps [JobId(1060), JobId(1059), JobId(1058)]
32657 Sep 22 23:21:56.933 INFO [lossy] skipping 1005
32658 Sep 22 23:21:56.933 INFO [lossy] skipping 1005
32659 Sep 22 23:21:56.964 DEBG Write :1005 deps:[JobId(1004), JobId(1001)] res:true
32660 Sep 22 23:21:57.297 DEBG IO Write 1062 has deps [JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32661 Sep 22 23:21:57.298 DEBG up_ds_listen was notified
32662 Sep 22 23:21:57.298 DEBG up_ds_listen process 1062
32663 Sep 22 23:21:57.298 DEBG [A] ack job 1062:63, : downstairs
32664 Sep 22 23:21:57.298 DEBG up_ds_listen checked 1 jobs, back to waiting
32665 Sep 22 23:21:57.636 DEBG IO Write 1063 has deps [JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32666 Sep 22 23:21:57.636 DEBG up_ds_listen was notified
32667 Sep 22 23:21:57.636 DEBG up_ds_listen process 1063
32668 Sep 22 23:21:57.636 DEBG [A] ack job 1063:64, : downstairs
32669 Sep 22 23:21:57.636 DEBG up_ds_listen checked 1 jobs, back to waiting
32670 Sep 22 23:21:57.637 DEBG IO Flush 1064 has deps [JobId(1063), JobId(1062), JobId(1061)]
32671 Sep 22 23:21:57.974 DEBG IO Write 1065 has deps [JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32672 Sep 22 23:21:57.974 DEBG up_ds_listen was notified
32673 Sep 22 23:21:57.974 DEBG up_ds_listen process 1065
32674 Sep 22 23:21:57.974 DEBG [A] ack job 1065:66, : downstairs
32675 Sep 22 23:21:57.974 DEBG up_ds_listen checked 1 jobs, back to waiting
32676 Sep 22 23:21:58.115 DEBG IO Write 1000 has deps []
32677 Sep 22 23:21:58.115 INFO Checking if live repair is needed
32678 Sep 22 23:21:58.115 INFO No Live Repair required at this time
32679 Sep 22 23:21:58.115 DEBG IO Flush 1001 has deps [JobId(1000)]
32680 Sep 22 23:21:58.115 DEBG up_ds_listen was notified
32681 Sep 22 23:21:58.115 DEBG up_ds_listen process 1000
32682 Sep 22 23:21:58.118 DEBG [A] ack job 1000:1, : downstairs
32683 Sep 22 23:21:58.118 DEBG up_ds_listen checked 1 jobs, back to waiting
32684 Sep 22 23:21:58.314 DEBG IO Write 1066 has deps [JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32685 Sep 22 23:21:58.315 DEBG up_ds_listen was notified
32686 Sep 22 23:21:58.315 DEBG up_ds_listen process 1066
32687 Sep 22 23:21:58.315 DEBG [A] ack job 1066:67, : downstairs
32688 Sep 22 23:21:58.315 DEBG up_ds_listen checked 1 jobs, back to waiting
32689 Sep 22 23:21:58.315 DEBG IO Flush 1067 has deps [JobId(1066), JobId(1065), JobId(1064)]
32690 Sep 22 23:21:58.656 DEBG IO Write 1068 has deps [JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32691 Sep 22 23:21:58.656 DEBG up_ds_listen was notified
32692 Sep 22 23:21:58.656 DEBG up_ds_listen process 1068
32693 Sep 22 23:21:58.656 DEBG [A] ack job 1068:69, : downstairs
32694 Sep 22 23:21:58.657 DEBG up_ds_listen checked 1 jobs, back to waiting
32695 Sep 22 23:21:59.059 DEBG IO Write 1069 has deps [JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)]
32696 Sep 22 23:21:59.059 DEBG up_ds_listen was notified
32697 Sep 22 23:21:59.060 DEBG up_ds_listen process 1069
32698 Sep 22 23:21:59.060 DEBG [A] ack job 1069:70, : downstairs
32699 Sep 22 23:21:59.060 DEBG up_ds_listen checked 1 jobs, back to waiting
32700 Sep 22 23:21:59.214 DEBG IO Flush 1070 has deps [JobId(1069), JobId(1068), JobId(1067)]
32701 Sep 22 23:21:59.214 INFO [lossy] skipping 1006
32702 Sep 22 23:21:59.246 DEBG Write :1006 deps:[JobId(1004), JobId(1001)] res:true
32703 Sep 22 23:21:59.247 WARN returning error on flush!
32704 Sep 22 23:21:59.247 DEBG Flush :1007 extent_limit None deps:[JobId(1006), JobId(1005), JobId(1004)] res:false f:3 g:1
32705 Sep 22 23:21:59.278 DEBG Write :1006 deps:[JobId(1004), JobId(1001)] res:true
32706 Sep 22 23:21:59.279 INFO [lossy] skipping 1006
32707 Sep 22 23:21:59.279 INFO [lossy] skipping 1007
32708 Sep 22 23:21:59.279 INFO [lossy] skipping 1006
32709 Sep 22 23:21:59.280 WARN returning error on write!
32710 Sep 22 23:21:59.280 DEBG Write :1006 deps:[JobId(1004), JobId(1001)] res:false
32711 Sep 22 23:21:59.280 WARN returning error on write!
32712 Sep 22 23:21:59.280 DEBG Write :1006 deps:[JobId(1004), JobId(1001)] res:false
32713 Sep 22 23:21:59.281 WARN returning error on write!
32714 Sep 22 23:21:59.281 DEBG Write :1006 deps:[JobId(1004), JobId(1001)] res:false
32715 Sep 22 23:21:59.312 DEBG Write :1006 deps:[JobId(1004), JobId(1001)] res:true
32716 Sep 22 23:21:59.332 DEBG Flush :1007 extent_limit None deps:[JobId(1006), JobId(1005), JobId(1004)] res:true f:3 g:1
32717 Sep 22 23:21:59.332 DEBG Flush :1007 extent_limit None deps:[JobId(1006), JobId(1005), JobId(1004)] res:true f:3 g:1
32718 Sep 22 23:21:59.332 DEBG Flush :1007 extent_limit None deps:[JobId(1006), JobId(1005), JobId(1004)] res:true f:3 g:1
32719 Sep 22 23:21:59.333 DEBG IO Read 1071 has deps [JobId(1070)]
32720 Sep 22 23:21:59.538 INFO [lossy] sleeping 1 second
32721 Sep 22 23:21:59.569 DEBG Write :1008 deps:[JobId(1007), JobId(1004), JobId(1001)] res:true
32722 Sep 22 23:21:59.570 INFO [lossy] sleeping 1 second
32723 Sep 22 23:21:59.814 DEBG Write :1009 deps:[JobId(1007), JobId(1004), JobId(1001)] res:true
32724 Sep 22 23:21:59.822 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007)] res:true f:4 g:1
32725 Sep 22 23:21:59.822 DEBG IO Flush 1072 has deps [JobId(1071), JobId(1070)]
32726 Sep 22 23:22:00.038 INFO [lossy] sleeping 1 second
32727 Sep 22 23:22:00.107 ERRO [0] job id 1000 saw error GenericError("test error")
32728 Sep 22 23:22:00.107 ERRO [0] job id 1000 saw error GenericError("test error")
32729 Sep 22 23:22:00.107 ERRO [0] job id 1007 saw error GenericError("test error")
32730 Sep 22 23:22:00.107 ERRO [1] job id 1003 saw error GenericError("test error")
32731 Sep 22 23:22:00.107 ERRO [1] job id 1005 saw error GenericError("test error")
32732 Sep 22 23:22:00.107 ERRO [1] job id 1005 saw error GenericError("test error")
32733 Sep 22 23:22:00.107 ERRO [2] job id 1000 saw error GenericError("test error")
32734 Sep 22 23:22:00.107 ERRO [2] job id 1000 saw error GenericError("test error")
32735 Sep 22 23:22:00.107 ERRO [2] job id 1002 saw error GenericError("test error")
32736 Sep 22 23:22:00.107 ERRO [2] job id 1002 saw error GenericError("test error")
32737 Sep 22 23:22:00.107 ERRO [2] job id 1002 saw error GenericError("test error")
32738 Sep 22 23:22:00.107 ERRO [2] job id 1003 saw error GenericError("test error")
32739 Sep 22 23:22:00.107 ERRO [2] job id 1006 saw error GenericError("test error")
32740 Sep 22 23:22:00.107 ERRO [2] job id 1006 saw error GenericError("test error")
32741 Sep 22 23:22:00.107 ERRO [2] job id 1006 saw error GenericError("test error")
32742 Sep 22 23:22:00.108 DEBG up_ds_listen was notified
32743 Sep 22 23:22:00.108 DEBG up_ds_listen process 1001
32744 Sep 22 23:22:00.108 DEBG [A] ack job 1001:2, : downstairs
32745 Sep 22 23:22:00.109 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
32746 Sep 22 23:22:00.109 DEBG up_ds_listen process 1004
32747 Sep 22 23:22:00.109 DEBG [A] ack job 1004:5, : downstairs
32748 Sep 22 23:22:00.112 DEBG [rc] retire 1004 clears [JobId(1002), JobId(1003), JobId(1004)], : downstairs
32749 Sep 22 23:22:00.112 DEBG up_ds_listen process 1007
32750 Sep 22 23:22:00.113 DEBG [A] ack job 1007:8, : downstairs
32751 Sep 22 23:22:00.116 DEBG [rc] retire 1007 clears [JobId(1005), JobId(1006), JobId(1007)], : downstairs
32752 Sep 22 23:22:00.116 DEBG up_ds_listen checked 3 jobs, back to waiting
32753 Sep 22 23:22:00.116 DEBG up_ds_listen was notified
32754 Sep 22 23:22:00.116 DEBG up_ds_listen checked 0 jobs, back to waiting
32755 Sep 22 23:22:00.116 DEBG up_ds_listen was notified
32756 Sep 22 23:22:00.116 DEBG up_ds_listen checked 0 jobs, back to waiting
32757 Sep 22 23:22:00.730 DEBG Write :1008 deps:[JobId(1007), JobId(1004), JobId(1001)] res:true
32758 Sep 22 23:22:00.761 DEBG Write :1009 deps:[JobId(1007), JobId(1004), JobId(1001)] res:true
32759 Sep 22 23:22:00.769 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007)] res:true f:4 g:1
32760 Sep 22 23:22:00.770 WARN returning error on write!
32761 Sep 22 23:22:00.770 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32762 Sep 22 23:22:00.801 DEBG Write :1012 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32763 Sep 22 23:22:00.803 INFO [lossy] skipping 1014
32764 Sep 22 23:22:00.833 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32765 Sep 22 23:22:00.842 DEBG Flush :1013 extent_limit None deps:[JobId(1012), JobId(1011), JobId(1010)] res:true f:5 g:1
32766 Sep 22 23:22:00.873 DEBG Write :1014 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32767 Sep 22 23:22:00.875 INFO [lossy] sleeping 1 second
32768 Sep 22 23:22:00.876 WARN returning error on write!
32769 Sep 22 23:22:00.876 DEBG Write :1008 deps:[JobId(1007), JobId(1004), JobId(1001)] res:false
32770 Sep 22 23:22:00.878 WARN returning error on write!
32771 Sep 22 23:22:00.878 DEBG Write :1009 deps:[JobId(1007), JobId(1004), JobId(1001)] res:false
32772 Sep 22 23:22:00.878 INFO [lossy] skipping 1012
32773 Sep 22 23:22:00.878 INFO [lossy] skipping 1013
32774 Sep 22 23:22:00.908 DEBG Write :1008 deps:[JobId(1007), JobId(1004), JobId(1001)] res:true
32775 Sep 22 23:22:00.941 DEBG Write :1009 deps:[JobId(1007), JobId(1004), JobId(1001)] res:true
32776 Sep 22 23:22:00.949 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007)] res:true f:4 g:1
32777 Sep 22 23:22:00.980 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32778 Sep 22 23:22:00.981 INFO [lossy] skipping 1012
32779 Sep 22 23:22:00.981 WARN 1013 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32780 Sep 22 23:22:00.981 WARN 1014 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32781 Sep 22 23:22:00.981 WARN 1015 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32782 Sep 22 23:22:00.981 INFO [lossy] skipping 1012
32783 Sep 22 23:22:00.981 INFO [lossy] skipping 1012
32784 Sep 22 23:22:00.981 INFO [lossy] skipping 1012
32785 Sep 22 23:22:01.013 DEBG Write :1012 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32786 Sep 22 23:22:01.014 INFO [lossy] sleeping 1 second
32787 Sep 22 23:22:01.223 WARN returning error on write!
32788 Sep 22 23:22:01.223 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32789 Sep 22 23:22:01.223 INFO [lossy] skipping 1012
32790 Sep 22 23:22:01.223 INFO [lossy] skipping 1016
32791 Sep 22 23:22:01.224 WARN returning error on write!
32792 Sep 22 23:22:01.224 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32793 Sep 22 23:22:01.256 DEBG Write :1012 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32794 Sep 22 23:22:01.258 WARN returning error on write!
32795 Sep 22 23:22:01.258 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32796 Sep 22 23:22:01.288 DEBG Write :1011 deps:[JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32797 Sep 22 23:22:01.296 DEBG Flush :1013 extent_limit None deps:[JobId(1012), JobId(1011), JobId(1010)] res:true f:5 g:1
32798 Sep 22 23:22:01.327 DEBG Write :1014 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32799 Sep 22 23:22:01.360 DEBG Write :1015 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32800 Sep 22 23:22:01.368 DEBG Flush :1016 extent_limit None deps:[JobId(1015), JobId(1014), JobId(1013)] res:true f:6 g:1
32801 Sep 22 23:22:01.368 INFO [lossy] sleeping 1 second
32802 Sep 22 23:22:01.453 INFO current number of open files limit 65536 is already the maximum
32803 Sep 22 23:22:01.453 INFO Created new region file "/tmp/downstairs-zrMnlo6G/region.json"
32804 Sep 22 23:22:01.760 INFO current number of open files limit 65536 is already the maximum
32805 Sep 22 23:22:01.760 INFO Opened existing region file "/tmp/downstairs-zrMnlo6G/region.json"
32806 Sep 22 23:22:01.760 INFO Database read version 1
32807 Sep 22 23:22:01.760 INFO Database write version 1
32808 Sep 22 23:22:01.812 INFO UUID: c3c61ac0-c12a-42ef-b3a2-a316e079f741
32809 Sep 22 23:22:01.812 INFO Blocks per extent:512 Total Extents: 188
32810 Sep 22 23:22:01.812 INFO Crucible Version: Crucible Version: 0.0.1
32811 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
32812 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
32813 rustc: 1.70.0 stable x86_64-unknown-illumos
32814 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
32815 Sep 22 23:22:01.812 INFO Upstairs <-> Downstairs Message Version: 4, task: main
32816 Sep 22 23:22:01.812 INFO Using address: 127.0.0.1:42129, task: main
32817 Sep 22 23:22:01.813 INFO Repair listens on 127.0.0.1:0, task: repair
32818 Sep 22 23:22:01.813 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56184, task: repair
32819 Sep 22 23:22:01.813 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56184, task: repair
32820 Sep 22 23:22:01.813 INFO listening, local_addr: 127.0.0.1:56184, task: repair
32821 Sep 22 23:22:01.813 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56184, task: repair
32822 Sep 22 23:22:01.813 INFO Using repair address: 127.0.0.1:56184, task: main
32823 Sep 22 23:22:01.813 INFO No SSL acceptor configured, task: main
32824 Sep 22 23:22:01.821 INFO listening on 127.0.0.1:0, task: main
32825 Sep 22 23:22:01.821 WARN 9144e02c-c312-47c4-9b1c-f03618834608 request to replace downstairs 127.0.0.1:52165 with 127.0.0.1:42129
32826 Sep 22 23:22:01.821 INFO 9144e02c-c312-47c4-9b1c-f03618834608 found old target: 127.0.0.1:52165 at 0
32827 Sep 22 23:22:01.821 INFO 9144e02c-c312-47c4-9b1c-f03618834608 replacing old: 127.0.0.1:52165 at 0
32828 Sep 22 23:22:01.821 INFO [0] client skip 2 in process jobs because fault, : downstairs
32829 Sep 22 23:22:01.821 INFO [0] changed 2 jobs to fault skipped, : downstairs
32830 Sep 22 23:22:01.821 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) Active Active Active ds_transition to Replacing
32831 Sep 22 23:22:01.821 INFO [0] Transition from Active to Replacing
32832 Sep 22 23:22:02.026 DEBG Write :1015 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32833 Sep 22 23:22:02.034 DEBG Flush :1016 extent_limit None deps:[JobId(1015), JobId(1014), JobId(1013)] res:true f:6 g:1
32834 Sep 22 23:22:02.035 WARN returning error on write!
32835 Sep 22 23:22:02.035 DEBG Write :1017 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32836 Sep 22 23:22:02.035 INFO [lossy] skipping 1018
32837 Sep 22 23:22:02.035 INFO [lossy] skipping 1021
32838 Sep 22 23:22:02.035 INFO [lossy] skipping 1017
32839 Sep 22 23:22:02.065 DEBG Write :1018 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32840 Sep 22 23:22:02.066 INFO [lossy] skipping 1021
32841 Sep 22 23:22:02.097 DEBG Write :1017 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32842 Sep 22 23:22:02.105 DEBG Flush :1019 extent_limit None deps:[JobId(1018), JobId(1017), JobId(1016)] res:true f:7 g:1
32843 Sep 22 23:22:02.136 DEBG Write :1020 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32844 Sep 22 23:22:02.137 INFO [lossy] skipping 1021
32845 Sep 22 23:22:02.137 INFO [lossy] skipping 1022
32846 Sep 22 23:22:02.168 DEBG Write :1021 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32847 Sep 22 23:22:02.176 DEBG Flush :1022 extent_limit None deps:[JobId(1021), JobId(1020), JobId(1019)] res:true f:8 g:1
32848 Sep 22 23:22:02.176 INFO [lossy] sleeping 1 second
32849 Sep 22 23:22:02.182 DEBG Flush :1013 extent_limit None deps:[JobId(1012), JobId(1011), JobId(1010)] res:true f:5 g:1
32850 Sep 22 23:22:02.184 WARN returning error on write!
32851 Sep 22 23:22:02.184 DEBG Write :1014 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32852 Sep 22 23:22:02.214 DEBG Write :1015 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32853 Sep 22 23:22:02.216 WARN 1016 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32854 Sep 22 23:22:02.216 INFO [lossy] skipping 1017
32855 Sep 22 23:22:02.216 INFO [lossy] skipping 1018
32856 Sep 22 23:22:02.216 INFO [lossy] skipping 1021
32857 Sep 22 23:22:02.216 INFO [lossy] skipping 1014
32858 Sep 22 23:22:02.216 INFO [lossy] skipping 1014
32859 Sep 22 23:22:02.246 DEBG Write :1014 deps:[JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32860 Sep 22 23:22:02.248 INFO [lossy] skipping 1016
32861 Sep 22 23:22:02.254 DEBG Flush :1016 extent_limit None deps:[JobId(1015), JobId(1014), JobId(1013)] res:true f:6 g:1
32862 Sep 22 23:22:02.285 DEBG Write :1017 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32863 Sep 22 23:22:02.287 WARN returning error on write!
32864 Sep 22 23:22:02.287 DEBG Write :1018 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32865 Sep 22 23:22:02.287 WARN 1019 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32866 Sep 22 23:22:02.287 INFO [lossy] skipping 1020
32867 Sep 22 23:22:02.287 INFO [lossy] skipping 1021
32868 Sep 22 23:22:02.287 INFO [lossy] skipping 1018
32869 Sep 22 23:22:02.287 INFO [lossy] skipping 1020
32870 Sep 22 23:22:02.287 INFO [lossy] skipping 1021
32871 Sep 22 23:22:02.287 INFO [lossy] skipping 1018
32872 Sep 22 23:22:02.287 INFO [lossy] skipping 1020
32873 Sep 22 23:22:02.287 WARN 1021 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32874 Sep 22 23:22:02.287 INFO [lossy] skipping 1018
32875 Sep 22 23:22:02.287 WARN 1020 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32876 Sep 22 23:22:02.317 DEBG Write :1018 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32877 Sep 22 23:22:02.325 DEBG Flush :1019 extent_limit None deps:[JobId(1018), JobId(1017), JobId(1016)] res:true f:7 g:1
32878 Sep 22 23:22:02.356 DEBG Write :1020 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32879 Sep 22 23:22:02.388 DEBG Write :1021 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32880 Sep 22 23:22:02.390 WARN returning error on flush!
32881 Sep 22 23:22:02.390 DEBG Flush :1022 extent_limit None deps:[JobId(1021), JobId(1020), JobId(1019)] res:false f:8 g:1
32882 Sep 22 23:22:02.396 DEBG Flush :1022 extent_limit None deps:[JobId(1021), JobId(1020), JobId(1019)] res:true f:8 g:1
32883 Sep 22 23:22:02.396 INFO [lossy] sleeping 1 second
32884 Sep 22 23:22:02.397 WARN returning error on write!
32885 Sep 22 23:22:02.397 DEBG Write :1017 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32886 Sep 22 23:22:02.428 DEBG Write :1018 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32887 Sep 22 23:22:02.429 INFO [lossy] skipping 1017
32888 Sep 22 23:22:02.459 DEBG Write :1017 deps:[JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32889 Sep 22 23:22:02.460 INFO [lossy] skipping 1019
32890 Sep 22 23:22:02.460 INFO [lossy] skipping 1021
32891 Sep 22 23:22:02.460 INFO [lossy] skipping 1022
32892 Sep 22 23:22:02.467 DEBG Flush :1019 extent_limit None deps:[JobId(1018), JobId(1017), JobId(1016)] res:true f:7 g:1
32893 Sep 22 23:22:02.467 INFO [lossy] skipping 1021
32894 Sep 22 23:22:02.467 WARN 1022 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
32895 Sep 22 23:22:02.497 DEBG Write :1021 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32896 Sep 22 23:22:02.499 INFO [lossy] skipping 1020
32897 Sep 22 23:22:02.499 WARN 1022 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32898 Sep 22 23:22:02.500 WARN returning error on write!
32899 Sep 22 23:22:02.500 DEBG Write :1020 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32900 Sep 22 23:22:02.501 WARN returning error on write!
32901 Sep 22 23:22:02.501 DEBG Write :1020 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32902 Sep 22 23:22:02.501 INFO [lossy] skipping 1020
32903 Sep 22 23:22:02.502 WARN returning error on write!
32904 Sep 22 23:22:02.502 DEBG Write :1020 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32905 Sep 22 23:22:02.532 DEBG Write :1020 deps:[JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32906 Sep 22 23:22:02.534 WARN returning error on flush!
32907 Sep 22 23:22:02.534 DEBG Flush :1022 extent_limit None deps:[JobId(1021), JobId(1020), JobId(1019)] res:false f:8 g:1
32908 Sep 22 23:22:02.534 INFO [lossy] skipping 1022
32909 Sep 22 23:22:02.534 INFO [lossy] skipping 1022
32910 Sep 22 23:22:02.534 WARN returning error on flush!
32911 Sep 22 23:22:02.534 DEBG Flush :1022 extent_limit None deps:[JobId(1021), JobId(1020), JobId(1019)] res:false f:8 g:1
32912 Sep 22 23:22:02.540 DEBG Flush :1022 extent_limit None deps:[JobId(1021), JobId(1020), JobId(1019)] res:true f:8 g:1
32913 Sep 22 23:22:02.752 INFO [lossy] sleeping 1 second
32914 Sep 22 23:22:03.215 DEBG Write :1023 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32915 Sep 22 23:22:03.246 DEBG Write :1024 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32916 Sep 22 23:22:03.247 WARN returning error on flush!
32917 Sep 22 23:22:03.247 DEBG Flush :1025 extent_limit None deps:[JobId(1024), JobId(1023), JobId(1022)] res:false f:9 g:1
32918 Sep 22 23:22:03.247 INFO [lossy] skipping 1025
32919 Sep 22 23:22:03.253 DEBG Flush :1025 extent_limit None deps:[JobId(1024), JobId(1023), JobId(1022)] res:true f:9 g:1
32920 Sep 22 23:22:03.283 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32921 Sep 22 23:22:03.284 INFO [lossy] sleeping 1 second
32922 Sep 22 23:22:03.530 DEBG Write :1023 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32923 Sep 22 23:22:03.531 INFO [lossy] skipping 1024
32924 Sep 22 23:22:03.561 DEBG Write :1024 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32925 Sep 22 23:22:03.569 DEBG Flush :1025 extent_limit None deps:[JobId(1024), JobId(1023), JobId(1022)] res:true f:9 g:1
32926 Sep 22 23:22:03.569 INFO [lossy] skipping 1026
32927 Sep 22 23:22:03.599 DEBG Write :1027 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32928 Sep 22 23:22:03.600 WARN 1028 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32929 Sep 22 23:22:03.600 INFO [lossy] skipping 1026
32930 Sep 22 23:22:03.601 WARN returning error on write!
32931 Sep 22 23:22:03.601 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32932 Sep 22 23:22:03.602 WARN returning error on write!
32933 Sep 22 23:22:03.602 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32934 Sep 22 23:22:03.602 WARN returning error on write!
32935 Sep 22 23:22:03.602 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32936 Sep 22 23:22:03.633 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32937 Sep 22 23:22:03.640 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026), JobId(1025)] res:true f:10 g:1
32938 Sep 22 23:22:03.640 INFO [lossy] sleeping 1 second
32939 Sep 22 23:22:03.886 DEBG Write :1023 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32940 Sep 22 23:22:03.888 WARN returning error on write!
32941 Sep 22 23:22:03.888 DEBG Write :1024 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32942 Sep 22 23:22:03.888 INFO [lossy] skipping 1025
32943 Sep 22 23:22:03.888 INFO [lossy] skipping 1026
32944 Sep 22 23:22:03.918 DEBG Write :1024 deps:[JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32945 Sep 22 23:22:03.926 DEBG Flush :1025 extent_limit None deps:[JobId(1024), JobId(1023), JobId(1022)] res:true f:9 g:1
32946 Sep 22 23:22:03.927 WARN returning error on write!
32947 Sep 22 23:22:03.927 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32948 Sep 22 23:22:03.957 DEBG Write :1026 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32949 Sep 22 23:22:03.988 DEBG Write :1027 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32950 Sep 22 23:22:03.995 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026), JobId(1025)] res:true f:10 g:1
32951 Sep 22 23:22:03.996 WARN returning error on write!
32952 Sep 22 23:22:03.996 DEBG Write :1029 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32953 Sep 22 23:22:03.997 INFO [lossy] skipping 1029
32954 Sep 22 23:22:03.997 INFO [lossy] skipping 1029
32955 Sep 22 23:22:04.027 DEBG Write :1029 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32956 Sep 22 23:22:04.028 INFO [lossy] sleeping 1 second
32957 Sep 22 23:22:04.456 INFO [lossy] skipping 1027
32958 Sep 22 23:22:04.456 INFO [lossy] skipping 1030
32959 Sep 22 23:22:04.487 DEBG Write :1027 deps:[JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32960 Sep 22 23:22:04.494 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026), JobId(1025)] res:true f:10 g:1
32961 Sep 22 23:22:04.494 INFO [lossy] skipping 1029
32962 Sep 22 23:22:04.524 DEBG Write :1030 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32963 Sep 22 23:22:04.525 WARN 1031 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32964 Sep 22 23:22:04.526 WARN returning error on write!
32965 Sep 22 23:22:04.526 DEBG Write :1029 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32966 Sep 22 23:22:04.555 DEBG Write :1029 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32967 Sep 22 23:22:04.556 INFO [lossy] skipping 1031
32968 Sep 22 23:22:04.562 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:true f:11 g:1
32969 Sep 22 23:22:04.562 INFO [lossy] sleeping 1 second
32970 Sep 22 23:22:04.806 DEBG Write :1029 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32971 Sep 22 23:22:04.837 DEBG Write :1030 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32972 Sep 22 23:22:04.844 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:true f:11 g:1
32973 Sep 22 23:22:04.844 INFO [lossy] skipping 1032
32974 Sep 22 23:22:04.874 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32975 Sep 22 23:22:04.905 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32976 Sep 22 23:22:04.912 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:true f:12 g:1
32977 Sep 22 23:22:04.912 INFO [lossy] sleeping 1 second
32978 Sep 22 23:22:05.156 DEBG Write :1030 deps:[JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32979 Sep 22 23:22:05.157 WARN returning error on flush!
32980 Sep 22 23:22:05.157 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:false f:11 g:1
32981 Sep 22 23:22:05.164 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:true f:11 g:1
32982 Sep 22 23:22:05.164 INFO [lossy] skipping 1032
32983 Sep 22 23:22:05.164 INFO [lossy] skipping 1033
32984 Sep 22 23:22:05.164 WARN 1034 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
32985 Sep 22 23:22:05.164 INFO [lossy] skipping 1035
32986 Sep 22 23:22:05.164 INFO [lossy] skipping 1032
32987 Sep 22 23:22:05.193 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32988 Sep 22 23:22:05.194 WARN 1035 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
32989 Sep 22 23:22:05.194 INFO [lossy] skipping 1032
32990 Sep 22 23:22:05.224 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
32991 Sep 22 23:22:05.225 INFO [lossy] sleeping 1 second
32992 Sep 22 23:22:05.714 WARN returning error on write!
32993 Sep 22 23:22:05.714 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32994 Sep 22 23:22:05.715 INFO [lossy] skipping 1033
32995 Sep 22 23:22:05.715 INFO [lossy] skipping 1037
32996 Sep 22 23:22:05.715 INFO [lossy] skipping 1038
32997 Sep 22 23:22:05.715 WARN returning error on write!
32998 Sep 22 23:22:05.715 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
32999 Sep 22 23:22:05.716 INFO [lossy] skipping 1033
33000 Sep 22 23:22:05.716 INFO [lossy] skipping 1037
33001 Sep 22 23:22:05.745 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33002 Sep 22 23:22:05.746 INFO [lossy] skipping 1033
33003 Sep 22 23:22:05.746 INFO [lossy] skipping 1037
33004 Sep 22 23:22:05.747 WARN returning error on write!
33005 Sep 22 23:22:05.747 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33006 Sep 22 23:22:05.777 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33007 Sep 22 23:22:05.778 INFO [lossy] sleeping 1 second
33008 Sep 22 23:22:06.037 DEBG Write :1035 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33009 Sep 22 23:22:06.038 WARN returning error on write!
33010 Sep 22 23:22:06.038 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33011 Sep 22 23:22:06.039 INFO [lossy] skipping 1039
33012 Sep 22 23:22:06.068 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33013 Sep 22 23:22:06.069 INFO [lossy] skipping 1039
33014 Sep 22 23:22:06.069 INFO [lossy] skipping 1039
33015 Sep 22 23:22:06.069 INFO [lossy] sleeping 1 second
33016 Sep 22 23:22:06.297 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:true f:12 g:1
33017 Sep 22 23:22:06.328 DEBG Write :1035 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33018 Sep 22 23:22:06.358 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33019 Sep 22 23:22:06.365 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:true f:13 g:1
33020 Sep 22 23:22:06.395 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33021 Sep 22 23:22:06.425 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33022 Sep 22 23:22:06.426 INFO [lossy] skipping 1040
33023 Sep 22 23:22:06.432 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:true f:14 g:1
33024 Sep 22 23:22:06.433 WARN returning error on write!
33025 Sep 22 23:22:06.433 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33026 Sep 22 23:22:06.434 WARN returning error on write!
33027 Sep 22 23:22:06.434 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33028 Sep 22 23:22:06.464 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33029 Sep 22 23:22:06.465 INFO [lossy] sleeping 1 second
33030 Sep 22 23:22:06.906 INFO [lossy] skipping 1034
33031 Sep 22 23:22:06.906 INFO [lossy] skipping 1036
33032 Sep 22 23:22:06.906 INFO [lossy] skipping 1039
33033 Sep 22 23:22:06.906 INFO [lossy] skipping 1040
33034 Sep 22 23:22:06.906 INFO [lossy] skipping 1043
33035 Sep 22 23:22:06.912 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:true f:12 g:1
33036 Sep 22 23:22:06.945 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33037 Sep 22 23:22:06.946 INFO [lossy] skipping 1043
33038 Sep 22 23:22:06.976 DEBG Write :1035 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33039 Sep 22 23:22:06.977 WARN returning error on flush!
33040 Sep 22 23:22:06.977 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:false f:13 g:1
33041 Sep 22 23:22:06.977 INFO [lossy] skipping 1038
33042 Sep 22 23:22:06.977 WARN 1041 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33043 Sep 22 23:22:06.977 WARN 1042 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33044 Sep 22 23:22:06.977 INFO [lossy] skipping 1043
33045 Sep 22 23:22:06.977 WARN 1044 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
33046 Sep 22 23:22:06.983 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:true f:13 g:1
33047 Sep 22 23:22:07.013 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33048 Sep 22 23:22:07.044 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33049 Sep 22 23:22:07.051 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:true f:14 g:1
33050 Sep 22 23:22:07.052 WARN returning error on write!
33051 Sep 22 23:22:07.052 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33052 Sep 22 23:22:07.054 WARN returning error on write!
33053 Sep 22 23:22:07.054 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33054 Sep 22 23:22:07.055 WARN 1043 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33055 Sep 22 23:22:07.055 WARN 1044 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33056 Sep 22 23:22:07.084 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33057 Sep 22 23:22:07.086 WARN returning error on write!
33058 Sep 22 23:22:07.086 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33059 Sep 22 23:22:07.117 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33060 Sep 22 23:22:07.118 WARN returning error on flush!
33061 Sep 22 23:22:07.118 DEBG Flush :1043 extent_limit None deps:[JobId(1042), JobId(1041), JobId(1040)] res:false f:15 g:1
33062 Sep 22 23:22:07.124 DEBG Flush :1043 extent_limit None deps:[JobId(1042), JobId(1041), JobId(1040)] res:true f:15 g:1
33063 Sep 22 23:22:07.155 DEBG Write :1044 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33064 Sep 22 23:22:07.156 INFO [lossy] sleeping 1 second
33065 Sep 22 23:22:07.156 WARN returning error on flush!
33066 Sep 22 23:22:07.156 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:false f:13 g:1
33067 Sep 22 23:22:07.157 INFO [lossy] skipping 1041
33068 Sep 22 23:22:07.157 INFO [lossy] skipping 1042
33069 Sep 22 23:22:07.157 INFO [lossy] skipping 1043
33070 Sep 22 23:22:07.163 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:true f:13 g:1
33071 Sep 22 23:22:07.194 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33072 Sep 22 23:22:07.226 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33073 Sep 22 23:22:07.227 WARN returning error on flush!
33074 Sep 22 23:22:07.227 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:false f:14 g:1
33075 Sep 22 23:22:07.227 INFO [lossy] skipping 1043
33076 Sep 22 23:22:07.227 INFO [lossy] skipping 1044
33077 Sep 22 23:22:07.234 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:true f:14 g:1
33078 Sep 22 23:22:07.234 INFO [lossy] skipping 1043
33079 Sep 22 23:22:07.234 INFO [lossy] skipping 1044
33080 Sep 22 23:22:07.234 INFO [lossy] skipping 1043
33081 Sep 22 23:22:07.234 WARN 1044 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33082 Sep 22 23:22:07.234 WARN 1043 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33083 Sep 22 23:22:07.265 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33084 Sep 22 23:22:07.267 WARN returning error on write!
33085 Sep 22 23:22:07.267 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33086 Sep 22 23:22:07.267 WARN 1043 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33087 Sep 22 23:22:07.267 INFO [lossy] skipping 1042
33088 Sep 22 23:22:07.268 WARN returning error on write!
33089 Sep 22 23:22:07.268 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33090 Sep 22 23:22:07.269 WARN returning error on write!
33091 Sep 22 23:22:07.269 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33092 Sep 22 23:22:07.269 INFO [lossy] skipping 1042
33093 Sep 22 23:22:07.300 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33094 Sep 22 23:22:07.308 DEBG Flush :1043 extent_limit None deps:[JobId(1042), JobId(1041), JobId(1040)] res:true f:15 g:1
33095 Sep 22 23:22:07.309 WARN returning error on write!
33096 Sep 22 23:22:07.309 DEBG Write :1044 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33097 Sep 22 23:22:07.340 DEBG Write :1044 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33098 Sep 22 23:22:07.341 INFO [lossy] sleeping 1 second
33099 Sep 22 23:22:07.577 INFO [lossy] skipping 1042
33100 Sep 22 23:22:07.577 INFO [lossy] skipping 1044
33101 Sep 22 23:22:07.578 WARN returning error on write!
33102 Sep 22 23:22:07.578 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33103 Sep 22 23:22:07.608 DEBG Write :1042 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33104 Sep 22 23:22:07.609 INFO [lossy] sleeping 1 second
33105 Sep 22 23:22:08.347 WARN returning error on write!
33106 Sep 22 23:22:08.347 DEBG Write :1045 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33107 Sep 22 23:22:08.377 DEBG Write :1045 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33108 Sep 22 23:22:08.385 DEBG Flush :1046 extent_limit None deps:[JobId(1045), JobId(1044), JobId(1043)] res:true f:16 g:1
33109 Sep 22 23:22:08.386 WARN returning error on write!
33110 Sep 22 23:22:08.386 DEBG Write :1047 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33111 Sep 22 23:22:08.386 INFO [lossy] skipping 1048
33112 Sep 22 23:22:08.386 WARN 1049 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33113 Sep 22 23:22:08.417 DEBG Write :1047 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33114 Sep 22 23:22:08.449 DEBG Write :1048 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33115 Sep 22 23:22:08.450 INFO [lossy] skipping 1049
33116 Sep 22 23:22:08.456 DEBG Flush :1049 extent_limit None deps:[JobId(1048), JobId(1047), JobId(1046)] res:true f:17 g:1
33117 Sep 22 23:22:08.457 INFO [lossy] sleeping 1 second
33118 Sep 22 23:22:08.458 WARN returning error on write!
33119 Sep 22 23:22:08.458 DEBG Write :1045 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33120 Sep 22 23:22:08.458 INFO [lossy] skipping 1048
33121 Sep 22 23:22:08.488 DEBG Write :1045 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33122 Sep 22 23:22:08.489 INFO [lossy] sleeping 1 second
33123 Sep 22 23:22:08.726 DEBG Flush :1043 extent_limit None deps:[JobId(1042), JobId(1041), JobId(1040)] res:true f:15 g:1
33124 Sep 22 23:22:08.764 DEBG Write :1044 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33125 Sep 22 23:22:08.796 DEBG Write :1045 deps:[JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33126 Sep 22 23:22:08.797 INFO [lossy] skipping 1046
33127 Sep 22 23:22:08.797 INFO [lossy] skipping 1047
33128 Sep 22 23:22:08.797 INFO [lossy] skipping 1049
33129 Sep 22 23:22:08.797 INFO [lossy] skipping 1052
33130 Sep 22 23:22:08.797 INFO [lossy] skipping 1046
33131 Sep 22 23:22:08.797 INFO [lossy] skipping 1049
33132 Sep 22 23:22:08.797 INFO [lossy] skipping 1052
33133 Sep 22 23:22:08.797 WARN returning error on flush!
33134 Sep 22 23:22:08.797 DEBG Flush :1046 extent_limit None deps:[JobId(1045), JobId(1044), JobId(1043)] res:false f:16 g:1
33135 Sep 22 23:22:08.797 INFO [lossy] skipping 1049
33136 Sep 22 23:22:08.797 INFO [lossy] skipping 1052
33137 Sep 22 23:22:08.803 DEBG Flush :1046 extent_limit None deps:[JobId(1045), JobId(1044), JobId(1043)] res:true f:16 g:1
33138 Sep 22 23:22:08.803 INFO [lossy] skipping 1049
33139 Sep 22 23:22:08.803 INFO [lossy] skipping 1049
33140 Sep 22 23:22:08.803 INFO [lossy] skipping 1049
33141 Sep 22 23:22:08.804 WARN returning error on write!
33142 Sep 22 23:22:08.804 DEBG Write :1047 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33143 Sep 22 23:22:08.804 INFO [lossy] skipping 1048
33144 Sep 22 23:22:08.804 WARN 1050 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33145 Sep 22 23:22:08.804 INFO [lossy] skipping 1051
33146 Sep 22 23:22:08.805 WARN returning error on write!
33147 Sep 22 23:22:08.805 DEBG Write :1047 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33148 Sep 22 23:22:08.836 DEBG Write :1048 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33149 Sep 22 23:22:08.837 WARN 1051 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33150 Sep 22 23:22:08.837 INFO [lossy] skipping 1047
33151 Sep 22 23:22:08.867 DEBG Write :1047 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33152 Sep 22 23:22:08.869 INFO [lossy] sleeping 1 second
33153 Sep 22 23:22:09.530 WARN returning error on write!
33154 Sep 22 23:22:09.530 DEBG Write :1050 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33155 Sep 22 23:22:09.562 DEBG Write :1051 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33156 Sep 22 23:22:09.563 INFO [lossy] skipping 1052
33157 Sep 22 23:22:09.563 INFO [lossy] skipping 1055
33158 Sep 22 23:22:09.563 INFO [lossy] skipping 1056
33159 Sep 22 23:22:09.563 INFO [lossy] skipping 1050
33160 Sep 22 23:22:09.563 INFO [lossy] skipping 1056
33161 Sep 22 23:22:09.594 DEBG Write :1050 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33162 Sep 22 23:22:09.595 INFO [lossy] skipping 1056
33163 Sep 22 23:22:09.595 WARN returning error on flush!
33164 Sep 22 23:22:09.595 DEBG Flush :1052 extent_limit None deps:[JobId(1051), JobId(1050), JobId(1049)] res:false f:18 g:1
33165 Sep 22 23:22:09.595 INFO [lossy] skipping 1054
33166 Sep 22 23:22:09.595 INFO [lossy] skipping 1046
33167 Sep 22 23:22:09.595 INFO [lossy] skipping 1049
33168 Sep 22 23:22:09.595 INFO [lossy] skipping 1054
33169 Sep 22 23:22:09.595 INFO [lossy] skipping 1056
33170 Sep 22 23:22:09.608 DEBG Flush :1052 extent_limit None deps:[JobId(1051), JobId(1050), JobId(1049)] res:true f:18 g:1
33171 Sep 22 23:22:09.608 INFO [lossy] skipping 1054
33172 Sep 22 23:22:09.639 DEBG Write :1054 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33173 Sep 22 23:22:09.670 DEBG Write :1053 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33174 Sep 22 23:22:09.672 DEBG Flush :1046 extent_limit None deps:[JobId(1045), JobId(1044), JobId(1043)] res:true f:16 g:1
33175 Sep 22 23:22:09.672 INFO [lossy] skipping 1049
33176 Sep 22 23:22:09.672 WARN 1049 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33177 Sep 22 23:22:09.703 DEBG Write :1047 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33178 Sep 22 23:22:09.735 DEBG Write :1048 deps:[JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33179 Sep 22 23:22:09.749 DEBG Flush :1055 extent_limit None deps:[JobId(1054), JobId(1053), JobId(1052)] res:true f:19 g:1
33180 Sep 22 23:22:09.780 DEBG Write :1056 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33181 Sep 22 23:22:09.781 INFO [lossy] sleeping 1 second
33182 Sep 22 23:22:09.781 DEBG Flush :1049 extent_limit None deps:[JobId(1048), JobId(1047), JobId(1046)] res:true f:17 g:1
33183 Sep 22 23:22:09.813 DEBG Write :1050 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33184 Sep 22 23:22:09.814 INFO [lossy] skipping 1051
33185 Sep 22 23:22:09.814 WARN 1052 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33186 Sep 22 23:22:09.814 WARN 1053 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33187 Sep 22 23:22:09.814 WARN 1054 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33188 Sep 22 23:22:09.814 WARN 1056 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33189 Sep 22 23:22:09.846 DEBG Write :1051 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33190 Sep 22 23:22:09.847 INFO [lossy] skipping 1052
33191 Sep 22 23:22:09.847 WARN returning error on flush!
33192 Sep 22 23:22:09.847 DEBG Flush :1052 extent_limit None deps:[JobId(1051), JobId(1050), JobId(1049)] res:false f:18 g:1
33193 Sep 22 23:22:09.847 INFO [lossy] skipping 1052
33194 Sep 22 23:22:09.854 DEBG Flush :1052 extent_limit None deps:[JobId(1051), JobId(1050), JobId(1049)] res:true f:18 g:1
33195 Sep 22 23:22:09.854 INFO [lossy] skipping 1053
33196 Sep 22 23:22:09.854 INFO [lossy] skipping 1054
33197 Sep 22 23:22:09.854 WARN 1055 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33198 Sep 22 23:22:09.854 WARN 1056 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33199 Sep 22 23:22:09.855 WARN returning error on write!
33200 Sep 22 23:22:09.855 DEBG Write :1053 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33201 Sep 22 23:22:09.887 DEBG Write :1054 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33202 Sep 22 23:22:09.888 INFO [lossy] skipping 1053
33203 Sep 22 23:22:09.889 WARN returning error on write!
33204 Sep 22 23:22:09.889 DEBG Write :1053 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33205 Sep 22 23:22:09.890 WARN returning error on write!
33206 Sep 22 23:22:09.890 DEBG Write :1053 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33207 Sep 22 23:22:09.890 INFO [lossy] skipping 1053
33208 Sep 22 23:22:09.921 DEBG Write :1053 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33209 Sep 22 23:22:09.922 INFO [lossy] skipping 1055
33210 Sep 22 23:22:09.928 DEBG Flush :1055 extent_limit None deps:[JobId(1054), JobId(1053), JobId(1052)] res:true f:19 g:1
33211 Sep 22 23:22:09.929 INFO [lossy] sleeping 1 second
33212 Sep 22 23:22:09.935 DEBG Flush :1049 extent_limit None deps:[JobId(1048), JobId(1047), JobId(1046)] res:true f:17 g:1
33213 Sep 22 23:22:09.937 WARN returning error on write!
33214 Sep 22 23:22:09.937 DEBG Write :1050 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33215 Sep 22 23:22:09.938 WARN returning error on write!
33216 Sep 22 23:22:09.938 DEBG Write :1051 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33217 Sep 22 23:22:09.938 WARN 1052 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33218 Sep 22 23:22:09.938 INFO [lossy] skipping 1054
33219 Sep 22 23:22:09.969 DEBG Write :1050 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33220 Sep 22 23:22:10.001 DEBG Write :1051 deps:[JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33221 Sep 22 23:22:10.002 INFO [lossy] skipping 1054
33222 Sep 22 23:22:10.009 DEBG Flush :1052 extent_limit None deps:[JobId(1051), JobId(1050), JobId(1049)] res:true f:18 g:1
33223 Sep 22 23:22:10.040 DEBG Write :1053 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33224 Sep 22 23:22:10.072 DEBG Write :1054 deps:[JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33225 Sep 22 23:22:10.073 INFO [lossy] skipping 1055
33226 Sep 22 23:22:10.073 INFO [lossy] skipping 1056
33227 Sep 22 23:22:10.080 DEBG Flush :1055 extent_limit None deps:[JobId(1054), JobId(1053), JobId(1052)] res:true f:19 g:1
33228 Sep 22 23:22:10.081 WARN returning error on write!
33229 Sep 22 23:22:10.081 DEBG Write :1056 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33230 Sep 22 23:22:10.081 INFO [lossy] skipping 1056
33231 Sep 22 23:22:10.111 DEBG Write :1056 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33232 Sep 22 23:22:10.113 INFO [lossy] sleeping 1 second
33233 Sep 22 23:22:10.949 INFO [lossy] skipping 1057
33234 Sep 22 23:22:10.981 DEBG Write :1057 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33235 Sep 22 23:22:10.982 INFO [lossy] sleeping 1 second
33236 Sep 22 23:22:11.013 DEBG Write :1056 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33237 Sep 22 23:22:11.045 DEBG Write :1057 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33238 Sep 22 23:22:11.046 WARN returning error on flush!
33239 Sep 22 23:22:11.046 DEBG Flush :1058 extent_limit None deps:[JobId(1057), JobId(1056), JobId(1055)] res:false f:20 g:1
33240 Sep 22 23:22:11.046 INFO [lossy] skipping 1059
33241 Sep 22 23:22:11.046 INFO [lossy] skipping 1060
33242 Sep 22 23:22:11.046 WARN returning error on flush!
33243 Sep 22 23:22:11.046 DEBG Flush :1058 extent_limit None deps:[JobId(1057), JobId(1056), JobId(1055)] res:false f:20 g:1
33244 Sep 22 23:22:11.052 DEBG Flush :1058 extent_limit None deps:[JobId(1057), JobId(1056), JobId(1055)] res:true f:20 g:1
33245 Sep 22 23:22:11.053 WARN returning error on write!
33246 Sep 22 23:22:11.053 DEBG Write :1059 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33247 Sep 22 23:22:11.084 DEBG Write :1060 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33248 Sep 22 23:22:11.085 INFO [lossy] skipping 1061
33249 Sep 22 23:22:11.085 WARN 1062 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33250 Sep 22 23:22:11.115 DEBG Write :1059 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33251 Sep 22 23:22:11.123 DEBG Flush :1061 extent_limit None deps:[JobId(1060), JobId(1059), JobId(1058)] res:true f:21 g:1
33252 Sep 22 23:22:11.123 INFO [lossy] skipping 1062
33253 Sep 22 23:22:11.123 INFO [lossy] skipping 1062
33254 Sep 22 23:22:11.153 DEBG Write :1062 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33255 Sep 22 23:22:11.154 INFO [lossy] sleeping 1 second
33256 Sep 22 23:22:11.155 WARN returning error on write!
33257 Sep 22 23:22:11.155 DEBG Write :1057 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33258 Sep 22 23:22:11.156 INFO [lossy] skipping 1060
33259 Sep 22 23:22:11.186 DEBG Write :1057 deps:[JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33260 Sep 22 23:22:11.187 INFO [lossy] skipping 1060
33261 Sep 22 23:22:11.187 INFO [lossy] sleeping 1 second
33262 Sep 22 23:22:12.032 DEBG Flush :1058 extent_limit None deps:[JobId(1057), JobId(1056), JobId(1055)] res:true f:20 g:1
33263 Sep 22 23:22:12.063 DEBG Write :1059 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33264 Sep 22 23:22:12.094 DEBG Write :1060 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33265 Sep 22 23:22:12.101 DEBG Flush :1061 extent_limit None deps:[JobId(1060), JobId(1059), JobId(1058)] res:true f:21 g:1
33266 Sep 22 23:22:12.102 WARN returning error on write!
33267 Sep 22 23:22:12.102 DEBG Write :1062 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33268 Sep 22 23:22:12.103 WARN returning error on write!
33269 Sep 22 23:22:12.103 DEBG Write :1063 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33270 Sep 22 23:22:12.133 DEBG Write :1062 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33271 Sep 22 23:22:12.164 DEBG Write :1063 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33272 Sep 22 23:22:12.172 DEBG Flush :1064 extent_limit None deps:[JobId(1063), JobId(1062), JobId(1061)] res:true f:22 g:1
33273 Sep 22 23:22:12.172 INFO [lossy] skipping 1065
33274 Sep 22 23:22:12.172 INFO [lossy] skipping 1066
33275 Sep 22 23:22:12.172 WARN 1067 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33276 Sep 22 23:22:12.172 WARN 1068 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33277 Sep 22 23:22:12.202 DEBG Write :1065 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33278 Sep 22 23:22:12.234 DEBG Write :1066 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33279 Sep 22 23:22:12.242 DEBG Flush :1067 extent_limit None deps:[JobId(1066), JobId(1065), JobId(1064)] res:true f:23 g:1
33280 Sep 22 23:22:12.273 DEBG Write :1068 deps:[JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33281 Sep 22 23:22:12.274 INFO [lossy] sleeping 1 second
33282 Sep 22 23:22:12.305 DEBG Write :1063 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33283 Sep 22 23:22:12.307 INFO [lossy] skipping 1064
33284 Sep 22 23:22:12.307 INFO [lossy] skipping 1066
33285 Sep 22 23:22:12.307 INFO [lossy] skipping 1064
33286 Sep 22 23:22:12.319 DEBG Flush :1064 extent_limit None deps:[JobId(1063), JobId(1062), JobId(1061)] res:true f:22 g:1
33287 Sep 22 23:22:12.320 WARN returning error on write!
33288 Sep 22 23:22:12.320 DEBG Write :1065 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33289 Sep 22 23:22:12.351 DEBG Write :1066 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33290 Sep 22 23:22:12.352 INFO [lossy] skipping 1067
33291 Sep 22 23:22:12.352 WARN 1068 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33292 Sep 22 23:22:12.353 WARN returning error on write!
33293 Sep 22 23:22:12.353 DEBG Write :1065 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33294 Sep 22 23:22:12.353 WARN 1067 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33295 Sep 22 23:22:12.353 INFO [lossy] skipping 1065
33296 Sep 22 23:22:12.383 DEBG Write :1065 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33297 Sep 22 23:22:12.385 DEBG Flush :1058 extent_limit None deps:[JobId(1057), JobId(1056), JobId(1055)] res:true f:20 g:1
33298 Sep 22 23:22:12.416 DEBG Write :1059 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33299 Sep 22 23:22:12.448 DEBG Write :1060 deps:[JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33300 Sep 22 23:22:12.449 INFO [lossy] skipping 1061
33301 Sep 22 23:22:12.449 INFO [lossy] skipping 1062
33302 Sep 22 23:22:12.449 INFO [lossy] skipping 1067
33303 Sep 22 23:22:12.449 WARN returning error on flush!
33304 Sep 22 23:22:12.449 DEBG Flush :1061 extent_limit None deps:[JobId(1060), JobId(1059), JobId(1058)] res:false f:21 g:1
33305 Sep 22 23:22:12.449 WARN 1062 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33306 Sep 22 23:22:12.462 DEBG Flush :1067 extent_limit None deps:[JobId(1066), JobId(1065), JobId(1064)] res:true f:23 g:1
33307 Sep 22 23:22:12.493 DEBG Write :1068 deps:[JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33308 Sep 22 23:22:12.494 INFO [lossy] sleeping 1 second
33309 Sep 22 23:22:12.494 DEBG Flush :1061 extent_limit None deps:[JobId(1060), JobId(1059), JobId(1058)] res:true f:21 g:1
33310 Sep 22 23:22:12.525 DEBG Write :1062 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33311 Sep 22 23:22:12.527 WARN returning error on write!
33312 Sep 22 23:22:12.527 DEBG Write :1063 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:false
33313 Sep 22 23:22:12.527 INFO [lossy] skipping 1064
33314 Sep 22 23:22:12.527 INFO [lossy] skipping 1065
33315 Sep 22 23:22:12.527 WARN 1066 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33316 Sep 22 23:22:12.527 INFO [lossy] skipping 1068
33317 Sep 22 23:22:12.527 INFO [lossy] skipping 1063
33318 Sep 22 23:22:12.528 WARN 1064 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33319 Sep 22 23:22:12.528 INFO [lossy] skipping 1065
33320 Sep 22 23:22:12.528 WARN 1068 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
33321 Sep 22 23:22:12.558 DEBG Write :1063 deps:[JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33322 Sep 22 23:22:12.559 WARN 1065 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
33323 Sep 22 23:22:12.566 DEBG Flush :1064 extent_limit None deps:[JobId(1063), JobId(1062), JobId(1061)] res:true f:22 g:1
33324 Sep 22 23:22:12.596 DEBG Write :1065 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33325 Sep 22 23:22:12.629 DEBG Write :1066 deps:[JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33326 Sep 22 23:22:12.636 DEBG Flush :1067 extent_limit None deps:[JobId(1066), JobId(1065), JobId(1064)] res:true f:23 g:1
33327 Sep 22 23:22:12.636 INFO [lossy] skipping 1068
33328 Sep 22 23:22:12.636 INFO [lossy] skipping 1068
33329 Sep 22 23:22:12.636 INFO [lossy] skipping 1068
33330 Sep 22 23:22:12.636 INFO [lossy] skipping 1068
33331 Sep 22 23:22:12.667 DEBG Write :1068 deps:[JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33332 Sep 22 23:22:12.669 INFO [lossy] sleeping 1 second
33333 Sep 22 23:22:12.754 DEBG Write :1000 deps:[] res:true
33334 Sep 22 23:22:12.822 ERRO [0] job id 1011 saw error GenericError("test error")
33335 Sep 22 23:22:12.822 ERRO [0] job id 1017 saw error GenericError("test error")
33336 Sep 22 23:22:12.822 ERRO [0] job id 1025 saw error GenericError("test error")
33337 Sep 22 23:22:12.822 ERRO [0] job id 1029 saw error GenericError("test error")
33338 Sep 22 23:22:12.823 ERRO [1] job id 1011 saw error GenericError("test error")
33339 Sep 22 23:22:12.823 ERRO [2] job id 1008 saw error GenericError("test error")
33340 Sep 22 23:22:12.823 DEBG up_ds_listen was notified
33341 Sep 22 23:22:12.823 DEBG up_ds_listen process 1010
33342 Sep 22 23:22:12.823 DEBG [A] ack job 1010:11, : downstairs
33343 Sep 22 23:22:12.824 DEBG up_ds_listen checked 1 jobs, back to waiting
33344 Sep 22 23:22:12.824 ERRO [1] job id 1011 saw error GenericError("test error")
33345 Sep 22 23:22:12.824 ERRO [2] job id 1009 saw error GenericError("test error")
33346 Sep 22 23:22:12.824 ERRO [0] job id 1032 saw error GenericError("test error")
33347 Sep 22 23:22:12.824 ERRO [1] job id 1011 saw error GenericError("test error")
33348 Sep 22 23:22:12.824 ERRO [0] job id 1032 saw error GenericError("test error")
33349 Sep 22 23:22:12.826 DEBG [rc] retire 1010 clears [JobId(1008), JobId(1009), JobId(1010)], : downstairs
33350 Sep 22 23:22:12.826 DEBG up_ds_listen was notified
33351 Sep 22 23:22:12.827 DEBG up_ds_listen process 1013
33352 Sep 22 23:22:12.827 DEBG [A] ack job 1013:14, : downstairs
33353 Sep 22 23:22:12.827 DEBG up_ds_listen checked 1 jobs, back to waiting
33354 Sep 22 23:22:12.827 ERRO [0] job id 1033 saw error GenericError("test error")
33355 Sep 22 23:22:12.830 DEBG [rc] retire 1013 clears [JobId(1011), JobId(1012), JobId(1013)], : downstairs
33356 Sep 22 23:22:12.830 ERRO [2] job id 1014 saw error GenericError("test error")
33357 Sep 22 23:22:12.830 DEBG up_ds_listen was notified
33358 Sep 22 23:22:12.830 DEBG up_ds_listen process 1016
33359 Sep 22 23:22:12.830 DEBG [A] ack job 1016:17, : downstairs
33360 Sep 22 23:22:12.830 DEBG up_ds_listen checked 1 jobs, back to waiting
33361 Sep 22 23:22:12.837 ERRO [1] job id 1017 saw error GenericError("test error")
33362 Sep 22 23:22:12.837 ERRO [0] job id 1037 saw error GenericError("test error")
33363 Sep 22 23:22:12.841 DEBG [rc] retire 1016 clears [JobId(1014), JobId(1015), JobId(1016)], : downstairs
33364 Sep 22 23:22:12.841 DEBG up_ds_listen was notified
33365 Sep 22 23:22:12.841 DEBG up_ds_listen process 1019
33366 Sep 22 23:22:12.841 DEBG [A] ack job 1019:20, : downstairs
33367 Sep 22 23:22:12.841 DEBG up_ds_listen checked 1 jobs, back to waiting
33368 Sep 22 23:22:12.841 ERRO [2] job id 1018 saw error GenericError("test error")
33369 Sep 22 23:22:12.841 ERRO [1] job id 1020 saw error GenericError("test error")
33370 Sep 22 23:22:12.841 ERRO [1] job id 1020 saw error GenericError("test error")
33371 Sep 22 23:22:12.844 DEBG [rc] retire 1019 clears [JobId(1017), JobId(1018), JobId(1019)], : downstairs
33372 Sep 22 23:22:12.844 ERRO [0] job id 1041 saw error GenericError("test error")
33373 Sep 22 23:22:12.844 ERRO [1] job id 1020 saw error GenericError("test error")
33374 Sep 22 23:22:12.844 ERRO [0] job id 1042 saw error GenericError("test error")
33375 Sep 22 23:22:12.844 ERRO [1] job id 1022 saw error GenericError("test error")
33376 Sep 22 23:22:12.844 ERRO [2] job id 1022 saw error GenericError("test error")
33377 Sep 22 23:22:12.844 ERRO [0] job id 1042 saw error GenericError("test error")
33378 Sep 22 23:22:12.844 ERRO [1] job id 1022 saw error GenericError("test error")
33379 Sep 22 23:22:12.845 DEBG up_ds_listen was notified
33380 Sep 22 23:22:12.845 DEBG up_ds_listen process 1022
33381 Sep 22 23:22:12.845 DEBG [A] ack job 1022:23, : downstairs
33382 Sep 22 23:22:12.845 DEBG up_ds_listen checked 1 jobs, back to waiting
33383 Sep 22 23:22:12.848 DEBG [rc] retire 1022 clears [JobId(1020), JobId(1021), JobId(1022)], : downstairs
33384 Sep 22 23:22:12.848 ERRO [0] job id 1043 saw error GenericError("test error")
33385 Sep 22 23:22:12.848 ERRO [1] job id 1024 saw error GenericError("test error")
33386 Sep 22 23:22:12.848 DEBG up_ds_listen was notified
33387 Sep 22 23:22:12.848 DEBG up_ds_listen process 1025
33388 Sep 22 23:22:12.848 DEBG [A] ack job 1025:26, : downstairs
33389 Sep 22 23:22:12.848 DEBG up_ds_listen checked 1 jobs, back to waiting
33390 Sep 22 23:22:12.848 ERRO [0] job id 1045 saw error GenericError("test error")
33391 Sep 22 23:22:12.848 ERRO [2] job id 1026 saw error GenericError("test error")
33392 Sep 22 23:22:12.851 DEBG [rc] retire 1025 clears [JobId(1023), JobId(1024), JobId(1025)], : downstairs
33393 Sep 22 23:22:12.851 ERRO [1] job id 1026 saw error GenericError("test error")
33394 Sep 22 23:22:12.851 ERRO [2] job id 1026 saw error GenericError("test error")
33395 Sep 22 23:22:12.851 ERRO [2] job id 1026 saw error GenericError("test error")
33396 Sep 22 23:22:12.851 ERRO [0] job id 1047 saw error GenericError("test error")
33397 Sep 22 23:22:12.852 DEBG up_ds_listen was notified
33398 Sep 22 23:22:12.852 DEBG up_ds_listen process 1028
33399 Sep 22 23:22:12.852 DEBG [A] ack job 1028:29, : downstairs
33400 Sep 22 23:22:12.855 DEBG [rc] retire 1028 clears [JobId(1026), JobId(1027), JobId(1028)], : downstairs
33401 Sep 22 23:22:12.855 DEBG up_ds_listen checked 1 jobs, back to waiting
33402 Sep 22 23:22:12.855 ERRO [1] job id 1029 saw error GenericError("test error")
33403 Sep 22 23:22:12.855 ERRO [0] job id 1050 saw error GenericError("test error")
33404 Sep 22 23:22:12.855 DEBG up_ds_listen was notified
33405 Sep 22 23:22:12.855 DEBG up_ds_listen process 1031
33406 Sep 22 23:22:12.855 DEBG [A] ack job 1031:32, : downstairs
33407 Sep 22 23:22:12.855 DEBG up_ds_listen checked 1 jobs, back to waiting
33408 Sep 22 23:22:12.855 ERRO [1] job id 1031 saw error GenericError("test error")
33409 Sep 22 23:22:12.858 DEBG [rc] retire 1031 clears [JobId(1029), JobId(1030), JobId(1031)], : downstairs
33410 Sep 22 23:22:12.858 ERRO [0] job id 1052 saw error GenericError("test error")
33411 Sep 22 23:22:12.858 DEBG up_ds_listen was notified
33412 Sep 22 23:22:12.858 DEBG up_ds_listen process 1034
33413 Sep 22 23:22:12.858 DEBG [A] ack job 1034:35, : downstairs
33414 Sep 22 23:22:12.858 DEBG up_ds_listen checked 1 jobs, back to waiting
33415 Sep 22 23:22:12.859 ERRO [2] job id 1036 saw error GenericError("test error")
33416 Sep 22 23:22:12.861 DEBG [rc] retire 1034 clears [JobId(1032), JobId(1033), JobId(1034)], : downstairs
33417 Sep 22 23:22:12.861 ERRO [2] job id 1037 saw error GenericError("test error")
33418 Sep 22 23:22:12.862 DEBG up_ds_listen was notified
33419 Sep 22 23:22:12.862 DEBG up_ds_listen process 1037
33420 Sep 22 23:22:12.862 DEBG [A] ack job 1037:38, : downstairs
33421 Sep 22 23:22:12.864 DEBG [rc] retire 1037 clears [JobId(1035), JobId(1036), JobId(1037)], : downstairs
33422 Sep 22 23:22:12.864 DEBG up_ds_listen checked 1 jobs, back to waiting
33423 Sep 22 23:22:12.865 ERRO [2] job id 1040 saw error GenericError("test error")
33424 Sep 22 23:22:12.865 DEBG up_ds_listen was notified
33425 Sep 22 23:22:12.865 DEBG up_ds_listen process 1040
33426 Sep 22 23:22:12.865 DEBG [A] ack job 1040:41, : downstairs
33427 Sep 22 23:22:12.865 DEBG up_ds_listen checked 1 jobs, back to waiting
33428 Sep 22 23:22:12.865 ERRO [1] job id 1041 saw error GenericError("test error")
33429 Sep 22 23:22:12.867 DEBG [rc] retire 1040 clears [JobId(1038), JobId(1039), JobId(1040)], : downstairs
33430 Sep 22 23:22:12.868 ERRO [1] job id 1041 saw error GenericError("test error")
33431 Sep 22 23:22:12.868 ERRO [0] job id 1062 saw error GenericError("test error")
33432 Sep 22 23:22:12.868 ERRO [2] job id 1042 saw error GenericError("test error")
33433 Sep 22 23:22:12.868 ERRO [0] job id 1063 saw error GenericError("test error")
33434 Sep 22 23:22:12.868 ERRO [1] job id 1042 saw error GenericError("test error")
33435 Sep 22 23:22:12.868 ERRO [2] job id 1042 saw error GenericError("test error")
33436 Sep 22 23:22:12.868 ERRO [2] job id 1042 saw error GenericError("test error")
33437 Sep 22 23:22:12.868 DEBG up_ds_listen was notified
33438 Sep 22 23:22:12.868 DEBG up_ds_listen process 1043
33439 Sep 22 23:22:12.868 DEBG [A] ack job 1043:44, : downstairs
33440 Sep 22 23:22:12.868 DEBG up_ds_listen checked 1 jobs, back to waiting
33441 Sep 22 23:22:12.871 DEBG [rc] retire 1043 clears [JobId(1041), JobId(1042), JobId(1043)], : downstairs
33442 Sep 22 23:22:12.871 ERRO [2] job id 1044 saw error GenericError("test error")
33443 Sep 22 23:22:12.871 ERRO [1] job id 1046 saw error GenericError("test error")
33444 Sep 22 23:22:12.871 ERRO [2] job id 1045 saw error GenericError("test error")
33445 Sep 22 23:22:12.871 DEBG up_ds_listen was notified
33446 Sep 22 23:22:12.871 DEBG up_ds_listen process 1046
33447 Sep 22 23:22:12.871 DEBG [A] ack job 1046:47, : downstairs
33448 Sep 22 23:22:12.871 DEBG up_ds_listen checked 1 jobs, back to waiting
33449 Sep 22 23:22:12.871 ERRO [1] job id 1047 saw error GenericError("test error")
33450 Sep 22 23:22:12.871 ERRO [1] job id 1047 saw error GenericError("test error")
33451 Sep 22 23:22:12.874 DEBG [rc] retire 1046 clears [JobId(1044), JobId(1045), JobId(1046)], : downstairs
33452 Sep 22 23:22:12.874 DEBG up_ds_listen was notified
33453 Sep 22 23:22:12.874 DEBG up_ds_listen process 1049
33454 Sep 22 23:22:12.874 DEBG [A] ack job 1049:50, : downstairs
33455 Sep 22 23:22:12.877 DEBG [rc] retire 1049 clears [JobId(1047), JobId(1048), JobId(1049)], : downstairs
33456 Sep 22 23:22:12.877 DEBG up_ds_listen checked 1 jobs, back to waiting
33457 Sep 22 23:22:12.877 ERRO [1] job id 1050 saw error GenericError("test error")
33458 Sep 22 23:22:12.877 ERRO [1] job id 1051 saw error GenericError("test error")
33459 Sep 22 23:22:12.877 ERRO [2] job id 1052 saw error GenericError("test error")
33460 Sep 22 23:22:12.877 DEBG up_ds_listen was notified
33461 Sep 22 23:22:12.877 DEBG up_ds_listen process 1052
33462 Sep 22 23:22:12.877 DEBG [A] ack job 1052:53, : downstairs
33463 Sep 22 23:22:12.877 DEBG up_ds_listen checked 1 jobs, back to waiting
33464 Sep 22 23:22:12.877 ERRO [2] job id 1053 saw error GenericError("test error")
33465 Sep 22 23:22:12.880 DEBG [rc] retire 1052 clears [JobId(1050), JobId(1051), JobId(1052)], : downstairs
33466 Sep 22 23:22:12.880 ERRO [2] job id 1053 saw error GenericError("test error")
33467 Sep 22 23:22:12.880 ERRO [2] job id 1053 saw error GenericError("test error")
33468 Sep 22 23:22:12.880 DEBG up_ds_listen was notified
33469 Sep 22 23:22:12.880 DEBG up_ds_listen process 1055
33470 Sep 22 23:22:12.880 DEBG [A] ack job 1055:56, : downstairs
33471 Sep 22 23:22:12.880 DEBG up_ds_listen checked 1 jobs, back to waiting
33472 Sep 22 23:22:12.880 ERRO [1] job id 1056 saw error GenericError("test error")
33473 Sep 22 23:22:12.883 DEBG [rc] retire 1055 clears [JobId(1053), JobId(1054), JobId(1055)], : downstairs
33474 Sep 22 23:22:12.884 ERRO [1] job id 1057 saw error GenericError("test error")
33475 Sep 22 23:22:12.884 ERRO [2] job id 1058 saw error GenericError("test error")
33476 Sep 22 23:22:12.884 DEBG up_ds_listen was notified
33477 Sep 22 23:22:12.884 DEBG up_ds_listen process 1058
33478 Sep 22 23:22:12.884 DEBG [A] ack job 1058:59, : downstairs
33479 Sep 22 23:22:12.884 DEBG up_ds_listen checked 1 jobs, back to waiting
33480 Sep 22 23:22:12.884 ERRO [2] job id 1058 saw error GenericError("test error")
33481 Sep 22 23:22:12.887 DEBG [rc] retire 1058 clears [JobId(1056), JobId(1057), JobId(1058)], : downstairs
33482 Sep 22 23:22:12.887 ERRO [1] job id 1061 saw error GenericError("test error")
33483 Sep 22 23:22:12.887 ERRO [2] job id 1059 saw error GenericError("test error")
33484 Sep 22 23:22:12.887 DEBG up_ds_listen was notified
33485 Sep 22 23:22:12.887 DEBG up_ds_listen process 1061
33486 Sep 22 23:22:12.887 DEBG [A] ack job 1061:62, : downstairs
33487 Sep 22 23:22:12.887 DEBG up_ds_listen checked 1 jobs, back to waiting
33488 Sep 22 23:22:12.887 ERRO [1] job id 1063 saw error GenericError("test error")
33489 Sep 22 23:22:12.890 DEBG [rc] retire 1061 clears [JobId(1059), JobId(1060), JobId(1061)], : downstairs
33490 Sep 22 23:22:12.890 DEBG up_ds_listen was notified
33491 Sep 22 23:22:12.890 DEBG up_ds_listen process 1064
33492 Sep 22 23:22:12.890 DEBG [A] ack job 1064:65, : downstairs
33493 Sep 22 23:22:12.890 DEBG up_ds_listen checked 1 jobs, back to waiting
33494 Sep 22 23:22:12.893 DEBG [rc] retire 1064 clears [JobId(1062), JobId(1063), JobId(1064)], : downstairs
33495 Sep 22 23:22:12.893 ERRO [2] job id 1065 saw error GenericError("test error")
33496 Sep 22 23:22:12.893 DEBG up_ds_listen was notified
33497 Sep 22 23:22:12.893 DEBG up_ds_listen process 1067
33498 Sep 22 23:22:12.893 DEBG [A] ack job 1067:68, : downstairs
33499 Sep 22 23:22:12.893 DEBG up_ds_listen checked 1 jobs, back to waiting
33500 Sep 22 23:22:12.893 ERRO [2] job id 1065 saw error GenericError("test error")
33501 Sep 22 23:22:12.896 DEBG [rc] retire 1067 clears [JobId(1065), JobId(1066), JobId(1067)], : downstairs
33502 Sep 22 23:22:13.195 DEBG Write :1000 deps:[] res:true
33503 Sep 22 23:22:13.307 DEBG Write :1069 deps:[JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33504 Sep 22 23:22:13.308 WARN returning error on flush!
33505 Sep 22 23:22:13.308 DEBG Flush :1070 extent_limit None deps:[JobId(1069), JobId(1068), JobId(1067)] res:false f:24 g:1
33506 Sep 22 23:22:13.308 INFO [lossy] skipping 1071
33507 Sep 22 23:22:13.308 INFO [lossy] skipping 1070
33508 Sep 22 23:22:13.315 DEBG Flush :1070 extent_limit None deps:[JobId(1069), JobId(1068), JobId(1067)] res:true f:24 g:1
33509 Sep 22 23:22:13.321 DEBG Read :1071 deps:[JobId(1070)] res:true
33510 Sep 22 23:22:13.343 ERRO [0] job id 1070 saw error GenericError("test error")
33511 Sep 22 23:22:13.345 DEBG Flush :1072 extent_limit None deps:[JobId(1071), JobId(1070)] res:true f:25 g:1
33512 Sep 22 23:22:13.345 INFO [lossy] sleeping 1 second
33513 Sep 22 23:22:13.636 DEBG Write :1000 deps:[] res:true
33514 Sep 22 23:22:13.727 DEBG [0] Read AckReady 1071, : downstairs
33515 Sep 22 23:22:13.728 DEBG up_ds_listen was notified
33516 Sep 22 23:22:13.728 DEBG up_ds_listen process 1071
33517 Sep 22 23:22:13.729 DEBG [A] ack job 1071:72, : downstairs
33518 Sep 22 23:22:13.783 DEBG up_ds_listen checked 1 jobs, back to waiting
33519 Sep 22 23:22:13.815 DEBG Write :1069 deps:[JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33520 Sep 22 23:22:13.816 INFO [lossy] skipping 1069
33521 Sep 22 23:22:13.816 INFO [lossy] skipping 1071
33522 Sep 22 23:22:13.847 DEBG Write :1069 deps:[JobId(1067), JobId(1064), JobId(1061), JobId(1058), JobId(1055), JobId(1052), JobId(1049), JobId(1046), JobId(1043), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1025), JobId(1022), JobId(1019), JobId(1016), JobId(1013), JobId(1010), JobId(1007), JobId(1004), JobId(1001)] res:true
33523 Sep 22 23:22:13.848 WARN returning error on flush!
33524 Sep 22 23:22:13.848 DEBG Flush :1070 extent_limit None deps:[JobId(1069), JobId(1068), JobId(1067)] res:false f:24 g:1
33525 Sep 22 23:22:13.848 INFO [lossy] skipping 1072
33526 Sep 22 23:22:13.848 INFO [lossy] skipping 1070
33527 Sep 22 23:22:13.861 DEBG Flush :1070 extent_limit None deps:[JobId(1069), JobId(1068), JobId(1067)] res:true f:24 g:1
33528 Sep 22 23:22:13.861 INFO [lossy] skipping 1071
33529 Sep 22 23:22:13.861 INFO [lossy] skipping 1071
33530 Sep 22 23:22:13.867 DEBG Read :1071 deps:[JobId(1070)] res:true
33531 Sep 22 23:22:13.888 DEBG Flush :1070 extent_limit None deps:[JobId(1069), JobId(1068), JobId(1067)] res:true f:24 g:1
33532 Sep 22 23:22:13.895 DEBG Read :1071 deps:[JobId(1070)] res:true
33533 Sep 22 23:22:13.917 DEBG IO Read 1073 has deps [JobId(1072)]
33534 Sep 22 23:22:13.917 ERRO [1] job id 1070 saw error GenericError("test error")
33535 Sep 22 23:22:13.923 DEBG up_ds_listen was notified
33536 Sep 22 23:22:13.923 DEBG up_ds_listen process 1070
33537 Sep 22 23:22:13.923 DEBG [A] ack job 1070:71, : downstairs
33538 Sep 22 23:22:13.924 WARN 9144e02c-c312-47c4-9b1c-f03618834608 request to replace downstairs 127.0.0.1:52165 with 127.0.0.1:42129
33539 Sep 22 23:22:13.924 INFO 9144e02c-c312-47c4-9b1c-f03618834608 found new target: 127.0.0.1:42129 at 0
33540 Sep 22 23:22:13.925 DEBG [rc] retire 1070 clears [JobId(1068), JobId(1069), JobId(1070)], : downstairs
33541 Sep 22 23:22:13.925 DEBG up_ds_listen checked 1 jobs, back to waiting
33542 Sep 22 23:22:13.934 DEBG Flush :1072 extent_limit None deps:[JobId(1071), JobId(1070)] res:true f:25 g:1
33543 Sep 22 23:22:13.934 INFO [lossy] sleeping 1 second
33544 Waited for some repair work, proceeding with test
33545 Sep 22 23:22:13.936 INFO [lossy] sleeping 1 second
33546 Sep 22 23:22:13.966 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
33547 Sep 22 23:22:14.008 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
33548 Sep 22 23:22:14.008 WARN [0] will exit pm_task, this downstairs Replacing
33549 Sep 22 23:22:14.008 ERRO 127.0.0.1:52165: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Replacing)), so we end too, looper: 0
33550 Sep 22 23:22:14.008 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 Gone missing, transition from Replacing to Replaced
33551 Sep 22 23:22:14.008 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 connection to 127.0.0.1:52165 closed, looper: 0
33552 Sep 22 23:22:14.008 INFO [0] 127.0.0.1:52165 task reports connection:false
33553 Sep 22 23:22:14.008 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Replaced Active Active
33554 Sep 22 23:22:14.008 INFO [0] 127.0.0.1:52165 task reports offline
33555 Sep 22 23:22:14.009 INFO Upstairs starts
33556 Sep 22 23:22:14.009 INFO Crucible Version: BuildInfo {
33557 version: "0.0.1",
33558 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
33559 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
33560 git_branch: "main",
33561 rustc_semver: "1.70.0",
33562 rustc_channel: "stable",
33563 rustc_host_triple: "x86_64-unknown-illumos",
33564 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
33565 cargo_triple: "x86_64-unknown-illumos",
33566 debug: true,
33567 opt_level: 0,
33568 }
33569 Sep 22 23:22:14.009 INFO Upstairs <-> Downstairs Message Version: 4
33570 Sep 22 23:22:14.009 INFO Crucible stats registered with UUID: 9144e02c-c312-47c4-9b1c-f03618834608
33571 Sep 22 23:22:14.009 INFO Crucible 9144e02c-c312-47c4-9b1c-f03618834608 has session id: 163c9a5f-e237-4960-a267-a823f80aaaa9
33572 Sep 22 23:22:14.009 WARN upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } disconnected, 0 jobs left, task: main
33573 Sep 22 23:22:14.009 WARN upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } was previously active, clearing, task: main
33574 Sep 22 23:22:14.009 INFO connection (127.0.0.1:33221): all done
33575 Sep 22 23:22:14.009 INFO [0] connecting to 127.0.0.1:42129, looper: 0
33576 Sep 22 23:22:14.009 INFO [1] connecting to 127.0.0.1:48339, looper: 1
33577 Sep 22 23:22:14.009 INFO [2] connecting to 127.0.0.1:33021, looper: 2
33578 Sep 22 23:22:14.009 INFO up_listen starts, task: up_listen
33579 Sep 22 23:22:14.009 INFO Wait for all three downstairs to come online
33580 Sep 22 23:22:14.009 INFO Flush timeout: 0.5
33581 Sep 22 23:22:14.009 DEBG up_ds_listen was notified
33582 Sep 22 23:22:14.010 DEBG up_ds_listen process 1001
33583 Sep 22 23:22:14.010 DEBG [A] ack job 1001:2, : downstairs
33584 Sep 22 23:22:14.079 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
33585 Sep 22 23:22:14.079 DEBG up_ds_listen checked 1 jobs, back to waiting
33586 Sep 22 23:22:14.079 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 0
33587 Sep 22 23:22:14.079 INFO [0] Proc runs for 127.0.0.1:42129 in state New
33588 Sep 22 23:22:14.079 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 1
33589 Sep 22 23:22:14.079 INFO [1] Proc runs for 127.0.0.1:48339 in state New
33590 Sep 22 23:22:14.079 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 2
33591 Sep 22 23:22:14.079 INFO [2] Proc runs for 127.0.0.1:33021 in state New
33592 Sep 22 23:22:14.079 INFO accepted connection from 127.0.0.1:33177, task: main
33593 Sep 22 23:22:14.079 INFO accepted connection from 127.0.0.1:52771, task: main
33594 Sep 22 23:22:14.079 INFO accepted connection from 127.0.0.1:38863, task: main
33595 Sep 22 23:22:14.080 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
33596 Sep 22 23:22:14.080 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } connected, version 4, task: proc
33597 Sep 22 23:22:14.080 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
33598 Sep 22 23:22:14.080 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } connected, version 4, task: proc
33599 Sep 22 23:22:14.080 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
33600 Sep 22 23:22:14.080 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } connected, version 4, task: proc
33601 The guest has requested activation
33602 Sep 22 23:22:14.080 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 (ec8dc28c-8140-4e2d-b538-f84f8abf147b) New New New ds_transition to WaitActive
33603 Sep 22 23:22:14.080 INFO [0] Transition from New to WaitActive
33604 Sep 22 23:22:14.080 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (ec8dc28c-8140-4e2d-b538-f84f8abf147b) WaitActive New New ds_transition to WaitActive
33605 Sep 22 23:22:14.080 INFO [1] Transition from New to WaitActive
33606 Sep 22 23:22:14.080 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (ec8dc28c-8140-4e2d-b538-f84f8abf147b) WaitActive WaitActive New ds_transition to WaitActive
33607 Sep 22 23:22:14.080 INFO [2] Transition from New to WaitActive
33608 Sep 22 23:22:14.080 INFO 9144e02c-c312-47c4-9b1c-f03618834608 active request set
33609 Sep 22 23:22:14.080 INFO [0] received activate with gen 2
33610 Sep 22 23:22:14.080 INFO [0] client got ds_active_rx, promote! session ec8dc28c-8140-4e2d-b538-f84f8abf147b
33611 Sep 22 23:22:14.080 INFO [1] received activate with gen 2
33612 Sep 22 23:22:14.080 INFO [1] client got ds_active_rx, promote! session ec8dc28c-8140-4e2d-b538-f84f8abf147b
33613 Sep 22 23:22:14.081 INFO [2] received activate with gen 2
33614 Sep 22 23:22:14.081 INFO [2] client got ds_active_rx, promote! session ec8dc28c-8140-4e2d-b538-f84f8abf147b
33615 Sep 22 23:22:14.081 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } to UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 }
33616 Sep 22 23:22:14.081 WARN Signaling to UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } thread that UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } is being promoted (read-write)
33617 Sep 22 23:22:14.081 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } to UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 }
33618 Sep 22 23:22:14.081 WARN Signaling to UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } thread that UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } is being promoted (read-write)
33619 Sep 22 23:22:14.081 INFO UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } is now active (read-write)
33620 Sep 22 23:22:14.081 WARN Another upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 }, task: main
33621 Sep 22 23:22:14.081 INFO UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } is now active (read-write)
33622 Sep 22 23:22:14.081 WARN Another upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 }, task: main
33623 Sep 22 23:22:14.082 INFO UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 } is now active (read-write)
33624 Sep 22 23:22:14.082 INFO connection (127.0.0.1:35948): all done
33625 Sep 22 23:22:14.082 INFO connection (127.0.0.1:45718): all done
33626 Sep 22 23:22:14.082 ERRO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) cmd_loop saw YouAreNoLongerActive 9144e02c-c312-47c4-9b1c-f03618834608 ec8dc28c-8140-4e2d-b538-f84f8abf147b 2
33627 Sep 22 23:22:14.082 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) Replaced Active Active ds_transition to Disabled
33628 Sep 22 23:22:14.082 INFO [1] Transition from Active to Disabled
33629 Sep 22 23:22:14.082 INFO 9144e02c-c312-47c4-9b1c-f03618834608 set inactive, session cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f
33630 Sep 22 23:22:14.082 ERRO 127.0.0.1:48339: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 1
33631 Sep 22 23:22:14.082 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 Gone missing, transition from Disabled to Disconnected
33632 Sep 22 23:22:14.082 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 connection to 127.0.0.1:48339 closed, looper: 1
33633 Sep 22 23:22:14.082 ERRO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) cmd_loop saw YouAreNoLongerActive 9144e02c-c312-47c4-9b1c-f03618834608 ec8dc28c-8140-4e2d-b538-f84f8abf147b 2
33634 Sep 22 23:22:14.082 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) Replaced Disconnected Active ds_transition to Disabled
33635 Sep 22 23:22:14.082 INFO [2] Transition from Active to Disabled
33636 Sep 22 23:22:14.082 INFO 9144e02c-c312-47c4-9b1c-f03618834608 set inactive, session cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f
33637 Sep 22 23:22:14.082 ERRO 127.0.0.1:33021: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 2
33638 Sep 22 23:22:14.082 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 Gone missing, transition from Disabled to Disconnected
33639 Sep 22 23:22:14.082 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 connection to 127.0.0.1:33021 closed, looper: 2
33640 Sep 22 23:22:14.082 WARN [1] pm_task rx.recv() is None
33641 Sep 22 23:22:14.083 INFO [1] 127.0.0.1:48339 task reports connection:false
33642 Sep 22 23:22:14.083 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Replaced Disconnected Disconnected
33643 Sep 22 23:22:14.083 INFO [1] 127.0.0.1:48339 task reports offline
33644 Sep 22 23:22:14.083 INFO [2] 127.0.0.1:33021 task reports connection:false
33645 Sep 22 23:22:14.083 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Replaced Disconnected Disconnected
33646 Sep 22 23:22:14.083 INFO [2] 127.0.0.1:33021 task reports offline
33647 Sep 22 23:22:14.083 WARN [2] pm_task rx.recv() is None
33648 Sep 22 23:22:14.083 INFO [0] downstairs client at 127.0.0.1:42129 has UUID c3c61ac0-c12a-42ef-b3a2-a316e079f741
33649 Sep 22 23:22:14.083 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: c3c61ac0-c12a-42ef-b3a2-a316e079f741, encrypted: true, database_read_version: 1, database_write_version: 1 }
33650 Sep 22 23:22:14.083 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitActive WaitActive WaitActive
33651 Sep 22 23:22:14.083 INFO [1] downstairs client at 127.0.0.1:48339 has UUID 0b1643bf-b244-4616-9246-4922ec6009af
33652 Sep 22 23:22:14.083 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 0b1643bf-b244-4616-9246-4922ec6009af, encrypted: true, database_read_version: 1, database_write_version: 1 }
33653 Sep 22 23:22:14.083 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitActive WaitActive WaitActive
33654 Sep 22 23:22:14.083 INFO [2] downstairs client at 127.0.0.1:33021 has UUID 99fe08ae-482d-4baf-90c5-1e5bdfb86144
33655 Sep 22 23:22:14.083 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 99fe08ae-482d-4baf-90c5-1e5bdfb86144, encrypted: true, database_read_version: 1, database_write_version: 1 }
33656 Sep 22 23:22:14.083 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitActive WaitActive WaitActive
33657 Sep 22 23:22:14.093 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
33658 Sep 22 23:22:14.094 INFO Current flush_numbers [0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33659 Sep 22 23:22:14.095 INFO Current flush_numbers [0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33660 Sep 22 23:22:14.108 INFO Downstairs has completed Negotiation, task: proc
33661 Sep 22 23:22:14.109 INFO Downstairs has completed Negotiation, task: proc
33662 Sep 22 23:22:14.110 INFO Downstairs has completed Negotiation, task: proc
33663 Sep 22 23:22:14.110 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 (ec8dc28c-8140-4e2d-b538-f84f8abf147b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
33664 Sep 22 23:22:14.110 INFO [0] Transition from WaitActive to WaitQuorum
33665 Sep 22 23:22:14.110 WARN [0] new RM replaced this: None
33666 Sep 22 23:22:14.110 INFO [0] Starts reconcile loop
33667 Sep 22 23:22:14.110 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (ec8dc28c-8140-4e2d-b538-f84f8abf147b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
33668 Sep 22 23:22:14.110 INFO [1] Transition from WaitActive to WaitQuorum
33669 Sep 22 23:22:14.110 WARN [1] new RM replaced this: None
33670 Sep 22 23:22:14.110 INFO [1] Starts reconcile loop
33671 Sep 22 23:22:14.111 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (ec8dc28c-8140-4e2d-b538-f84f8abf147b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
33672 Sep 22 23:22:14.111 INFO [2] Transition from WaitActive to WaitQuorum
33673 Sep 22 23:22:14.111 WARN [2] new RM replaced this: None
33674 Sep 22 23:22:14.111 INFO [2] Starts reconcile loop
33675 Sep 22 23:22:14.111 INFO [0] 127.0.0.1:42129 task reports connection:true
33676 Sep 22 23:22:14.111 INFO 9144e02c-c312-47c4-9b1c-f03618834608 WaitQuorum WaitQuorum WaitQuorum
33677 Sep 22 23:22:14.111 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
33678 Sep 22 23:22:14.111 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
33679 Sep 22 23:22:14.111 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
33680 Sep 22 23:22:14.111 INFO [1]R flush_numbers[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33681 Sep 22 23:22:14.111 INFO [1]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33682 Sep 22 23:22:14.111 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
33683 Sep 22 23:22:14.111 INFO [2]R flush_numbers[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33684 Sep 22 23:22:14.111 INFO [2]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33685 Sep 22 23:22:14.111 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
33686 Sep 22 23:22:14.111 INFO Max found gen is 2
33687 Sep 22 23:22:14.111 INFO Generation requested: 2 >= found:2
33688 Sep 22 23:22:14.111 INFO Next flush: 2
33689 Sep 22 23:22:14.111 INFO Extent 0 has flush number mismatch, : mend
33690 Sep 22 23:22:14.111 INFO First source client ID for extent 0, mrl: flush_mismatch, : mend
33691 Sep 22 23:22:14.111 INFO extent:0 gens: 0 1 1, mrl: flush_mismatch, : mend
33692 Sep 22 23:22:14.111 INFO extent:0 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33693 Sep 22 23:22:14.111 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33694 Sep 22 23:22:14.111 INFO extent:0 dirty: false false false, mrl: flush_mismatch, : mend
33695 Sep 22 23:22:14.111 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33696 Sep 22 23:22:14.111 INFO find dest for source 1 for extent at index 0, mrl: flush_mismatch, : mend
33697 Sep 22 23:22:14.111 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33698 Sep 22 23:22:14.111 INFO Extent 1 has flush number mismatch, : mend
33699 Sep 22 23:22:14.111 INFO First source client ID for extent 1, mrl: flush_mismatch, : mend
33700 Sep 22 23:22:14.111 INFO extent:1 gens: 0 1 1, mrl: flush_mismatch, : mend
33701 Sep 22 23:22:14.111 INFO extent:1 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33702 Sep 22 23:22:14.111 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33703 Sep 22 23:22:14.111 INFO extent:1 dirty: false false false, mrl: flush_mismatch, : mend
33704 Sep 22 23:22:14.111 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33705 Sep 22 23:22:14.111 INFO find dest for source 1 for extent at index 1, mrl: flush_mismatch, : mend
33706 Sep 22 23:22:14.111 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33707 Sep 22 23:22:14.111 INFO Extent 2 has flush number mismatch, : mend
33708 Sep 22 23:22:14.111 INFO First source client ID for extent 2, mrl: flush_mismatch, : mend
33709 Sep 22 23:22:14.111 INFO extent:2 gens: 0 1 1, mrl: flush_mismatch, : mend
33710 Sep 22 23:22:14.111 INFO extent:2 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33711 Sep 22 23:22:14.111 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33712 Sep 22 23:22:14.111 INFO extent:2 dirty: false false false, mrl: flush_mismatch, : mend
33713 Sep 22 23:22:14.111 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33714 Sep 22 23:22:14.111 INFO find dest for source 1 for extent at index 2, mrl: flush_mismatch, : mend
33715 Sep 22 23:22:14.111 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33716 Sep 22 23:22:14.111 INFO Extent 3 has flush number mismatch, : mend
33717 Sep 22 23:22:14.111 INFO First source client ID for extent 3, mrl: flush_mismatch, : mend
33718 Sep 22 23:22:14.111 INFO extent:3 gens: 0 1 1, mrl: flush_mismatch, : mend
33719 Sep 22 23:22:14.111 INFO extent:3 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33720 Sep 22 23:22:14.111 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33721 Sep 22 23:22:14.111 INFO extent:3 dirty: false false false, mrl: flush_mismatch, : mend
33722 Sep 22 23:22:14.111 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33723 Sep 22 23:22:14.111 INFO find dest for source 1 for extent at index 3, mrl: flush_mismatch, : mend
33724 Sep 22 23:22:14.111 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33725 Sep 22 23:22:14.111 INFO Extent 4 has flush number mismatch, : mend
33726 Sep 22 23:22:14.111 INFO First source client ID for extent 4, mrl: flush_mismatch, : mend
33727 Sep 22 23:22:14.111 INFO extent:4 gens: 0 1 1, mrl: flush_mismatch, : mend
33728 Sep 22 23:22:14.112 INFO extent:4 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33729 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33730 Sep 22 23:22:14.112 INFO extent:4 dirty: false false false, mrl: flush_mismatch, : mend
33731 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33732 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 4, mrl: flush_mismatch, : mend
33733 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33734 Sep 22 23:22:14.112 INFO Extent 5 has flush number mismatch, : mend
33735 Sep 22 23:22:14.112 INFO First source client ID for extent 5, mrl: flush_mismatch, : mend
33736 Sep 22 23:22:14.112 INFO extent:5 gens: 0 1 1, mrl: flush_mismatch, : mend
33737 Sep 22 23:22:14.112 INFO extent:5 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33738 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33739 Sep 22 23:22:14.112 INFO extent:5 dirty: false false false, mrl: flush_mismatch, : mend
33740 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33741 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 5, mrl: flush_mismatch, : mend
33742 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33743 Sep 22 23:22:14.112 INFO Extent 6 has flush number mismatch, : mend
33744 Sep 22 23:22:14.112 INFO First source client ID for extent 6, mrl: flush_mismatch, : mend
33745 Sep 22 23:22:14.112 INFO extent:6 gens: 0 1 1, mrl: flush_mismatch, : mend
33746 Sep 22 23:22:14.112 INFO extent:6 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33747 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33748 Sep 22 23:22:14.112 INFO extent:6 dirty: false false false, mrl: flush_mismatch, : mend
33749 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33750 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 6, mrl: flush_mismatch, : mend
33751 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33752 Sep 22 23:22:14.112 INFO Extent 7 has flush number mismatch, : mend
33753 Sep 22 23:22:14.112 INFO First source client ID for extent 7, mrl: flush_mismatch, : mend
33754 Sep 22 23:22:14.112 INFO extent:7 gens: 0 1 1, mrl: flush_mismatch, : mend
33755 Sep 22 23:22:14.112 INFO extent:7 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33756 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33757 Sep 22 23:22:14.112 INFO extent:7 dirty: false false false, mrl: flush_mismatch, : mend
33758 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33759 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 7, mrl: flush_mismatch, : mend
33760 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33761 Sep 22 23:22:14.112 INFO Extent 8 has flush number mismatch, : mend
33762 Sep 22 23:22:14.112 INFO First source client ID for extent 8, mrl: flush_mismatch, : mend
33763 Sep 22 23:22:14.112 INFO extent:8 gens: 0 1 1, mrl: flush_mismatch, : mend
33764 Sep 22 23:22:14.112 INFO extent:8 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33765 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33766 Sep 22 23:22:14.112 INFO extent:8 dirty: false false false, mrl: flush_mismatch, : mend
33767 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33768 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 8, mrl: flush_mismatch, : mend
33769 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33770 Sep 22 23:22:14.112 INFO Extent 9 has flush number mismatch, : mend
33771 Sep 22 23:22:14.112 INFO First source client ID for extent 9, mrl: flush_mismatch, : mend
33772 Sep 22 23:22:14.112 INFO extent:9 gens: 0 1 1, mrl: flush_mismatch, : mend
33773 Sep 22 23:22:14.112 INFO extent:9 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33774 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33775 Sep 22 23:22:14.112 INFO extent:9 dirty: false false false, mrl: flush_mismatch, : mend
33776 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33777 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 9, mrl: flush_mismatch, : mend
33778 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33779 Sep 22 23:22:14.112 INFO Extent 10 has flush number mismatch, : mend
33780 Sep 22 23:22:14.112 INFO First source client ID for extent 10, mrl: flush_mismatch, : mend
33781 Sep 22 23:22:14.112 INFO extent:10 gens: 0 1 1, mrl: flush_mismatch, : mend
33782 Sep 22 23:22:14.112 INFO extent:10 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33783 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33784 Sep 22 23:22:14.112 INFO extent:10 dirty: false false false, mrl: flush_mismatch, : mend
33785 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33786 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 10, mrl: flush_mismatch, : mend
33787 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33788 Sep 22 23:22:14.112 INFO Extent 11 has flush number mismatch, : mend
33789 Sep 22 23:22:14.112 INFO First source client ID for extent 11, mrl: flush_mismatch, : mend
33790 Sep 22 23:22:14.112 INFO extent:11 gens: 0 1 1, mrl: flush_mismatch, : mend
33791 Sep 22 23:22:14.112 INFO extent:11 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33792 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33793 Sep 22 23:22:14.112 INFO extent:11 dirty: false false false, mrl: flush_mismatch, : mend
33794 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33795 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 11, mrl: flush_mismatch, : mend
33796 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33797 Sep 22 23:22:14.112 INFO Extent 12 has flush number mismatch, : mend
33798 Sep 22 23:22:14.112 INFO First source client ID for extent 12, mrl: flush_mismatch, : mend
33799 Sep 22 23:22:14.112 INFO extent:12 gens: 0 1 1, mrl: flush_mismatch, : mend
33800 Sep 22 23:22:14.112 INFO extent:12 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33801 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33802 Sep 22 23:22:14.112 INFO extent:12 dirty: false false false, mrl: flush_mismatch, : mend
33803 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33804 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 12, mrl: flush_mismatch, : mend
33805 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33806 Sep 22 23:22:14.112 INFO Extent 13 has flush number mismatch, : mend
33807 Sep 22 23:22:14.112 INFO First source client ID for extent 13, mrl: flush_mismatch, : mend
33808 Sep 22 23:22:14.112 INFO extent:13 gens: 0 1 1, mrl: flush_mismatch, : mend
33809 Sep 22 23:22:14.112 INFO extent:13 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33810 Sep 22 23:22:14.112 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33811 Sep 22 23:22:14.112 INFO extent:13 dirty: false false false, mrl: flush_mismatch, : mend
33812 Sep 22 23:22:14.112 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33813 Sep 22 23:22:14.112 INFO find dest for source 1 for extent at index 13, mrl: flush_mismatch, : mend
33814 Sep 22 23:22:14.112 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33815 Sep 22 23:22:14.112 INFO Extent 14 has flush number mismatch, : mend
33816 Sep 22 23:22:14.112 INFO First source client ID for extent 14, mrl: flush_mismatch, : mend
33817 Sep 22 23:22:14.112 INFO extent:14 gens: 0 1 1, mrl: flush_mismatch, : mend
33818 Sep 22 23:22:14.113 INFO extent:14 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33819 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33820 Sep 22 23:22:14.113 INFO extent:14 dirty: false false false, mrl: flush_mismatch, : mend
33821 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33822 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 14, mrl: flush_mismatch, : mend
33823 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33824 Sep 22 23:22:14.113 INFO Extent 15 has flush number mismatch, : mend
33825 Sep 22 23:22:14.113 INFO First source client ID for extent 15, mrl: flush_mismatch, : mend
33826 Sep 22 23:22:14.113 INFO extent:15 gens: 0 1 1, mrl: flush_mismatch, : mend
33827 Sep 22 23:22:14.113 INFO extent:15 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33828 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33829 Sep 22 23:22:14.113 INFO extent:15 dirty: false false false, mrl: flush_mismatch, : mend
33830 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33831 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 15, mrl: flush_mismatch, : mend
33832 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33833 Sep 22 23:22:14.113 INFO Extent 16 has flush number mismatch, : mend
33834 Sep 22 23:22:14.113 INFO First source client ID for extent 16, mrl: flush_mismatch, : mend
33835 Sep 22 23:22:14.113 INFO extent:16 gens: 0 1 1, mrl: flush_mismatch, : mend
33836 Sep 22 23:22:14.113 INFO extent:16 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33837 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33838 Sep 22 23:22:14.113 INFO extent:16 dirty: false false false, mrl: flush_mismatch, : mend
33839 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33840 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 16, mrl: flush_mismatch, : mend
33841 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33842 Sep 22 23:22:14.113 INFO Extent 17 has flush number mismatch, : mend
33843 Sep 22 23:22:14.113 INFO First source client ID for extent 17, mrl: flush_mismatch, : mend
33844 Sep 22 23:22:14.113 INFO extent:17 gens: 0 1 1, mrl: flush_mismatch, : mend
33845 Sep 22 23:22:14.113 INFO extent:17 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33846 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33847 Sep 22 23:22:14.113 INFO extent:17 dirty: false false false, mrl: flush_mismatch, : mend
33848 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33849 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 17, mrl: flush_mismatch, : mend
33850 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33851 Sep 22 23:22:14.113 INFO Extent 18 has flush number mismatch, : mend
33852 Sep 22 23:22:14.113 INFO First source client ID for extent 18, mrl: flush_mismatch, : mend
33853 Sep 22 23:22:14.113 INFO extent:18 gens: 0 1 1, mrl: flush_mismatch, : mend
33854 Sep 22 23:22:14.113 INFO extent:18 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33855 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33856 Sep 22 23:22:14.113 INFO extent:18 dirty: false false false, mrl: flush_mismatch, : mend
33857 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33858 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 18, mrl: flush_mismatch, : mend
33859 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33860 Sep 22 23:22:14.113 INFO Extent 19 has flush number mismatch, : mend
33861 Sep 22 23:22:14.113 INFO First source client ID for extent 19, mrl: flush_mismatch, : mend
33862 Sep 22 23:22:14.113 INFO extent:19 gens: 0 1 1, mrl: flush_mismatch, : mend
33863 Sep 22 23:22:14.113 INFO extent:19 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33864 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33865 Sep 22 23:22:14.113 INFO extent:19 dirty: false false false, mrl: flush_mismatch, : mend
33866 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33867 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 19, mrl: flush_mismatch, : mend
33868 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33869 Sep 22 23:22:14.113 INFO Extent 20 has flush number mismatch, : mend
33870 Sep 22 23:22:14.113 INFO First source client ID for extent 20, mrl: flush_mismatch, : mend
33871 Sep 22 23:22:14.113 INFO extent:20 gens: 0 1 1, mrl: flush_mismatch, : mend
33872 Sep 22 23:22:14.113 INFO extent:20 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33873 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33874 Sep 22 23:22:14.113 INFO extent:20 dirty: false false false, mrl: flush_mismatch, : mend
33875 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33876 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 20, mrl: flush_mismatch, : mend
33877 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33878 Sep 22 23:22:14.113 INFO Extent 21 has flush number mismatch, : mend
33879 Sep 22 23:22:14.113 INFO First source client ID for extent 21, mrl: flush_mismatch, : mend
33880 Sep 22 23:22:14.113 INFO extent:21 gens: 0 1 1, mrl: flush_mismatch, : mend
33881 Sep 22 23:22:14.113 INFO extent:21 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33882 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33883 Sep 22 23:22:14.113 INFO extent:21 dirty: false false false, mrl: flush_mismatch, : mend
33884 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33885 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 21, mrl: flush_mismatch, : mend
33886 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33887 Sep 22 23:22:14.113 INFO Extent 22 has flush number mismatch, : mend
33888 Sep 22 23:22:14.113 INFO First source client ID for extent 22, mrl: flush_mismatch, : mend
33889 Sep 22 23:22:14.113 INFO extent:22 gens: 0 1 1, mrl: flush_mismatch, : mend
33890 Sep 22 23:22:14.113 INFO extent:22 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33891 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33892 Sep 22 23:22:14.113 INFO extent:22 dirty: false false false, mrl: flush_mismatch, : mend
33893 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33894 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 22, mrl: flush_mismatch, : mend
33895 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33896 Sep 22 23:22:14.113 INFO Extent 23 has flush number mismatch, : mend
33897 Sep 22 23:22:14.113 INFO First source client ID for extent 23, mrl: flush_mismatch, : mend
33898 Sep 22 23:22:14.113 INFO extent:23 gens: 0 1 1, mrl: flush_mismatch, : mend
33899 Sep 22 23:22:14.113 INFO extent:23 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33900 Sep 22 23:22:14.113 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33901 Sep 22 23:22:14.113 INFO extent:23 dirty: false false false, mrl: flush_mismatch, : mend
33902 Sep 22 23:22:14.113 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33903 Sep 22 23:22:14.113 INFO find dest for source 1 for extent at index 23, mrl: flush_mismatch, : mend
33904 Sep 22 23:22:14.113 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33905 Sep 22 23:22:14.113 INFO Extent 24 has flush number mismatch, : mend
33906 Sep 22 23:22:14.113 INFO First source client ID for extent 24, mrl: flush_mismatch, : mend
33907 Sep 22 23:22:14.113 INFO extent:24 gens: 0 1 1, mrl: flush_mismatch, : mend
33908 Sep 22 23:22:14.113 INFO extent:24 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33909 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33910 Sep 22 23:22:14.114 INFO extent:24 dirty: false false false, mrl: flush_mismatch, : mend
33911 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33912 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 24, mrl: flush_mismatch, : mend
33913 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33914 Sep 22 23:22:14.114 INFO Extent 25 has flush number mismatch, : mend
33915 Sep 22 23:22:14.114 INFO First source client ID for extent 25, mrl: flush_mismatch, : mend
33916 Sep 22 23:22:14.114 INFO extent:25 gens: 0 1 1, mrl: flush_mismatch, : mend
33917 Sep 22 23:22:14.114 INFO extent:25 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33918 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33919 Sep 22 23:22:14.114 INFO extent:25 dirty: false false false, mrl: flush_mismatch, : mend
33920 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33921 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 25, mrl: flush_mismatch, : mend
33922 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33923 Sep 22 23:22:14.114 INFO Extent 26 has flush number mismatch, : mend
33924 Sep 22 23:22:14.114 INFO First source client ID for extent 26, mrl: flush_mismatch, : mend
33925 Sep 22 23:22:14.114 INFO extent:26 gens: 0 1 1, mrl: flush_mismatch, : mend
33926 Sep 22 23:22:14.114 INFO extent:26 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33927 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33928 Sep 22 23:22:14.114 INFO extent:26 dirty: false false false, mrl: flush_mismatch, : mend
33929 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33930 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 26, mrl: flush_mismatch, : mend
33931 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33932 Sep 22 23:22:14.114 INFO Extent 27 has flush number mismatch, : mend
33933 Sep 22 23:22:14.114 INFO First source client ID for extent 27, mrl: flush_mismatch, : mend
33934 Sep 22 23:22:14.114 INFO extent:27 gens: 0 1 1, mrl: flush_mismatch, : mend
33935 Sep 22 23:22:14.114 INFO extent:27 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33936 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33937 Sep 22 23:22:14.114 INFO extent:27 dirty: false false false, mrl: flush_mismatch, : mend
33938 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33939 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 27, mrl: flush_mismatch, : mend
33940 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33941 Sep 22 23:22:14.114 INFO Extent 28 has flush number mismatch, : mend
33942 Sep 22 23:22:14.114 INFO First source client ID for extent 28, mrl: flush_mismatch, : mend
33943 Sep 22 23:22:14.114 INFO extent:28 gens: 0 1 1, mrl: flush_mismatch, : mend
33944 Sep 22 23:22:14.114 INFO extent:28 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33945 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33946 Sep 22 23:22:14.114 INFO extent:28 dirty: false false false, mrl: flush_mismatch, : mend
33947 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33948 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 28, mrl: flush_mismatch, : mend
33949 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33950 Sep 22 23:22:14.114 INFO Extent 29 has flush number mismatch, : mend
33951 Sep 22 23:22:14.114 INFO First source client ID for extent 29, mrl: flush_mismatch, : mend
33952 Sep 22 23:22:14.114 INFO extent:29 gens: 0 1 1, mrl: flush_mismatch, : mend
33953 Sep 22 23:22:14.114 INFO extent:29 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33954 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33955 Sep 22 23:22:14.114 INFO extent:29 dirty: false false false, mrl: flush_mismatch, : mend
33956 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33957 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 29, mrl: flush_mismatch, : mend
33958 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33959 Sep 22 23:22:14.114 INFO Extent 30 has flush number mismatch, : mend
33960 Sep 22 23:22:14.114 INFO First source client ID for extent 30, mrl: flush_mismatch, : mend
33961 Sep 22 23:22:14.114 INFO extent:30 gens: 0 1 1, mrl: flush_mismatch, : mend
33962 Sep 22 23:22:14.114 INFO extent:30 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33963 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33964 Sep 22 23:22:14.114 INFO extent:30 dirty: false false false, mrl: flush_mismatch, : mend
33965 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33966 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 30, mrl: flush_mismatch, : mend
33967 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33968 Sep 22 23:22:14.114 INFO Extent 31 has flush number mismatch, : mend
33969 Sep 22 23:22:14.114 INFO First source client ID for extent 31, mrl: flush_mismatch, : mend
33970 Sep 22 23:22:14.114 INFO extent:31 gens: 0 1 1, mrl: flush_mismatch, : mend
33971 Sep 22 23:22:14.114 INFO extent:31 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33972 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33973 Sep 22 23:22:14.114 INFO extent:31 dirty: false false false, mrl: flush_mismatch, : mend
33974 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33975 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 31, mrl: flush_mismatch, : mend
33976 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33977 Sep 22 23:22:14.114 INFO Extent 32 has flush number mismatch, : mend
33978 Sep 22 23:22:14.114 INFO First source client ID for extent 32, mrl: flush_mismatch, : mend
33979 Sep 22 23:22:14.114 INFO extent:32 gens: 0 1 1, mrl: flush_mismatch, : mend
33980 Sep 22 23:22:14.114 INFO extent:32 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33981 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33982 Sep 22 23:22:14.114 INFO extent:32 dirty: false false false, mrl: flush_mismatch, : mend
33983 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33984 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 32, mrl: flush_mismatch, : mend
33985 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33986 Sep 22 23:22:14.114 INFO Extent 33 has flush number mismatch, : mend
33987 Sep 22 23:22:14.114 INFO First source client ID for extent 33, mrl: flush_mismatch, : mend
33988 Sep 22 23:22:14.114 INFO extent:33 gens: 0 1 1, mrl: flush_mismatch, : mend
33989 Sep 22 23:22:14.114 INFO extent:33 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33990 Sep 22 23:22:14.114 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33991 Sep 22 23:22:14.114 INFO extent:33 dirty: false false false, mrl: flush_mismatch, : mend
33992 Sep 22 23:22:14.114 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33993 Sep 22 23:22:14.114 INFO find dest for source 1 for extent at index 33, mrl: flush_mismatch, : mend
33994 Sep 22 23:22:14.114 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33995 Sep 22 23:22:14.114 INFO Extent 34 has flush number mismatch, : mend
33996 Sep 22 23:22:14.114 INFO First source client ID for extent 34, mrl: flush_mismatch, : mend
33997 Sep 22 23:22:14.114 INFO extent:34 gens: 0 1 1, mrl: flush_mismatch, : mend
33998 Sep 22 23:22:14.115 INFO extent:34 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33999 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34000 Sep 22 23:22:14.115 INFO extent:34 dirty: false false false, mrl: flush_mismatch, : mend
34001 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34002 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 34, mrl: flush_mismatch, : mend
34003 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34004 Sep 22 23:22:14.115 INFO Extent 35 has flush number mismatch, : mend
34005 Sep 22 23:22:14.115 INFO First source client ID for extent 35, mrl: flush_mismatch, : mend
34006 Sep 22 23:22:14.115 INFO extent:35 gens: 0 1 1, mrl: flush_mismatch, : mend
34007 Sep 22 23:22:14.115 INFO extent:35 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34008 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34009 Sep 22 23:22:14.115 INFO extent:35 dirty: false false false, mrl: flush_mismatch, : mend
34010 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34011 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 35, mrl: flush_mismatch, : mend
34012 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34013 Sep 22 23:22:14.115 INFO Extent 36 has flush number mismatch, : mend
34014 Sep 22 23:22:14.115 INFO First source client ID for extent 36, mrl: flush_mismatch, : mend
34015 Sep 22 23:22:14.115 INFO extent:36 gens: 0 1 1, mrl: flush_mismatch, : mend
34016 Sep 22 23:22:14.115 INFO extent:36 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34017 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34018 Sep 22 23:22:14.115 INFO extent:36 dirty: false false false, mrl: flush_mismatch, : mend
34019 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34020 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 36, mrl: flush_mismatch, : mend
34021 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34022 Sep 22 23:22:14.115 INFO Extent 37 has flush number mismatch, : mend
34023 Sep 22 23:22:14.115 INFO First source client ID for extent 37, mrl: flush_mismatch, : mend
34024 Sep 22 23:22:14.115 INFO extent:37 gens: 0 1 1, mrl: flush_mismatch, : mend
34025 Sep 22 23:22:14.115 INFO extent:37 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34026 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34027 Sep 22 23:22:14.115 INFO extent:37 dirty: false false false, mrl: flush_mismatch, : mend
34028 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34029 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 37, mrl: flush_mismatch, : mend
34030 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34031 Sep 22 23:22:14.115 INFO Extent 38 has flush number mismatch, : mend
34032 Sep 22 23:22:14.115 INFO First source client ID for extent 38, mrl: flush_mismatch, : mend
34033 Sep 22 23:22:14.115 INFO extent:38 gens: 0 1 1, mrl: flush_mismatch, : mend
34034 Sep 22 23:22:14.115 INFO extent:38 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34035 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34036 Sep 22 23:22:14.115 INFO extent:38 dirty: false false false, mrl: flush_mismatch, : mend
34037 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34038 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 38, mrl: flush_mismatch, : mend
34039 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34040 Sep 22 23:22:14.115 INFO Extent 39 has flush number mismatch, : mend
34041 Sep 22 23:22:14.115 INFO First source client ID for extent 39, mrl: flush_mismatch, : mend
34042 Sep 22 23:22:14.115 INFO extent:39 gens: 0 1 1, mrl: flush_mismatch, : mend
34043 Sep 22 23:22:14.115 INFO extent:39 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34044 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34045 Sep 22 23:22:14.115 INFO extent:39 dirty: false false false, mrl: flush_mismatch, : mend
34046 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34047 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 39, mrl: flush_mismatch, : mend
34048 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34049 Sep 22 23:22:14.115 INFO Extent 40 has flush number mismatch, : mend
34050 Sep 22 23:22:14.115 INFO First source client ID for extent 40, mrl: flush_mismatch, : mend
34051 Sep 22 23:22:14.115 INFO extent:40 gens: 0 1 1, mrl: flush_mismatch, : mend
34052 Sep 22 23:22:14.115 INFO extent:40 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34053 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34054 Sep 22 23:22:14.115 INFO extent:40 dirty: false false false, mrl: flush_mismatch, : mend
34055 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34056 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 40, mrl: flush_mismatch, : mend
34057 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34058 Sep 22 23:22:14.115 INFO Extent 41 has flush number mismatch, : mend
34059 Sep 22 23:22:14.115 INFO First source client ID for extent 41, mrl: flush_mismatch, : mend
34060 Sep 22 23:22:14.115 INFO extent:41 gens: 0 1 1, mrl: flush_mismatch, : mend
34061 Sep 22 23:22:14.115 INFO extent:41 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34062 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34063 Sep 22 23:22:14.115 INFO extent:41 dirty: false false false, mrl: flush_mismatch, : mend
34064 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34065 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 41, mrl: flush_mismatch, : mend
34066 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34067 Sep 22 23:22:14.115 INFO Extent 42 has flush number mismatch, : mend
34068 Sep 22 23:22:14.115 INFO First source client ID for extent 42, mrl: flush_mismatch, : mend
34069 Sep 22 23:22:14.115 INFO extent:42 gens: 0 1 1, mrl: flush_mismatch, : mend
34070 Sep 22 23:22:14.115 INFO extent:42 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34071 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34072 Sep 22 23:22:14.115 INFO extent:42 dirty: false false false, mrl: flush_mismatch, : mend
34073 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34074 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 42, mrl: flush_mismatch, : mend
34075 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34076 Sep 22 23:22:14.115 INFO Extent 43 has flush number mismatch, : mend
34077 Sep 22 23:22:14.115 INFO First source client ID for extent 43, mrl: flush_mismatch, : mend
34078 Sep 22 23:22:14.115 INFO extent:43 gens: 0 1 1, mrl: flush_mismatch, : mend
34079 Sep 22 23:22:14.115 INFO extent:43 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34080 Sep 22 23:22:14.115 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34081 Sep 22 23:22:14.115 INFO extent:43 dirty: false false false, mrl: flush_mismatch, : mend
34082 Sep 22 23:22:14.115 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34083 Sep 22 23:22:14.115 INFO find dest for source 1 for extent at index 43, mrl: flush_mismatch, : mend
34084 Sep 22 23:22:14.115 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34085 Sep 22 23:22:14.115 INFO Extent 44 has flush number mismatch, : mend
34086 Sep 22 23:22:14.115 INFO First source client ID for extent 44, mrl: flush_mismatch, : mend
34087 Sep 22 23:22:14.115 INFO extent:44 gens: 0 1 1, mrl: flush_mismatch, : mend
34088 Sep 22 23:22:14.115 INFO extent:44 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34089 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34090 Sep 22 23:22:14.116 INFO extent:44 dirty: false false false, mrl: flush_mismatch, : mend
34091 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34092 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 44, mrl: flush_mismatch, : mend
34093 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34094 Sep 22 23:22:14.116 INFO Extent 45 has flush number mismatch, : mend
34095 Sep 22 23:22:14.116 INFO First source client ID for extent 45, mrl: flush_mismatch, : mend
34096 Sep 22 23:22:14.116 INFO extent:45 gens: 0 1 1, mrl: flush_mismatch, : mend
34097 Sep 22 23:22:14.116 INFO extent:45 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34098 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34099 Sep 22 23:22:14.116 INFO extent:45 dirty: false false false, mrl: flush_mismatch, : mend
34100 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34101 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 45, mrl: flush_mismatch, : mend
34102 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34103 Sep 22 23:22:14.116 INFO Extent 46 has flush number mismatch, : mend
34104 Sep 22 23:22:14.116 INFO First source client ID for extent 46, mrl: flush_mismatch, : mend
34105 Sep 22 23:22:14.116 INFO extent:46 gens: 0 1 1, mrl: flush_mismatch, : mend
34106 Sep 22 23:22:14.116 INFO extent:46 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34107 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34108 Sep 22 23:22:14.116 INFO extent:46 dirty: false false false, mrl: flush_mismatch, : mend
34109 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34110 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 46, mrl: flush_mismatch, : mend
34111 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34112 Sep 22 23:22:14.116 INFO Extent 47 has flush number mismatch, : mend
34113 Sep 22 23:22:14.116 INFO First source client ID for extent 47, mrl: flush_mismatch, : mend
34114 Sep 22 23:22:14.116 INFO extent:47 gens: 0 1 1, mrl: flush_mismatch, : mend
34115 Sep 22 23:22:14.116 INFO extent:47 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34116 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34117 Sep 22 23:22:14.116 INFO extent:47 dirty: false false false, mrl: flush_mismatch, : mend
34118 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34119 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 47, mrl: flush_mismatch, : mend
34120 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34121 Sep 22 23:22:14.116 INFO Extent 48 has flush number mismatch, : mend
34122 Sep 22 23:22:14.116 INFO First source client ID for extent 48, mrl: flush_mismatch, : mend
34123 Sep 22 23:22:14.116 INFO extent:48 gens: 0 1 1, mrl: flush_mismatch, : mend
34124 Sep 22 23:22:14.116 INFO extent:48 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34125 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34126 Sep 22 23:22:14.116 INFO extent:48 dirty: false false false, mrl: flush_mismatch, : mend
34127 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34128 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 48, mrl: flush_mismatch, : mend
34129 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34130 Sep 22 23:22:14.116 INFO Extent 49 has flush number mismatch, : mend
34131 Sep 22 23:22:14.116 INFO First source client ID for extent 49, mrl: flush_mismatch, : mend
34132 Sep 22 23:22:14.116 INFO extent:49 gens: 0 1 1, mrl: flush_mismatch, : mend
34133 Sep 22 23:22:14.116 INFO extent:49 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34134 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34135 Sep 22 23:22:14.116 INFO extent:49 dirty: false false false, mrl: flush_mismatch, : mend
34136 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34137 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 49, mrl: flush_mismatch, : mend
34138 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34139 Sep 22 23:22:14.116 INFO Extent 50 has flush number mismatch, : mend
34140 Sep 22 23:22:14.116 INFO First source client ID for extent 50, mrl: flush_mismatch, : mend
34141 Sep 22 23:22:14.116 INFO extent:50 gens: 0 1 1, mrl: flush_mismatch, : mend
34142 Sep 22 23:22:14.116 INFO extent:50 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34143 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34144 Sep 22 23:22:14.116 INFO extent:50 dirty: false false false, mrl: flush_mismatch, : mend
34145 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34146 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 50, mrl: flush_mismatch, : mend
34147 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34148 Sep 22 23:22:14.116 INFO Extent 51 has flush number mismatch, : mend
34149 Sep 22 23:22:14.116 INFO First source client ID for extent 51, mrl: flush_mismatch, : mend
34150 Sep 22 23:22:14.116 INFO extent:51 gens: 0 1 1, mrl: flush_mismatch, : mend
34151 Sep 22 23:22:14.116 INFO extent:51 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34152 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34153 Sep 22 23:22:14.116 INFO extent:51 dirty: false false false, mrl: flush_mismatch, : mend
34154 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34155 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 51, mrl: flush_mismatch, : mend
34156 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34157 Sep 22 23:22:14.116 INFO Extent 52 has flush number mismatch, : mend
34158 Sep 22 23:22:14.116 INFO First source client ID for extent 52, mrl: flush_mismatch, : mend
34159 Sep 22 23:22:14.116 INFO extent:52 gens: 0 1 1, mrl: flush_mismatch, : mend
34160 Sep 22 23:22:14.116 INFO extent:52 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34161 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34162 Sep 22 23:22:14.116 INFO extent:52 dirty: false false false, mrl: flush_mismatch, : mend
34163 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34164 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 52, mrl: flush_mismatch, : mend
34165 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34166 Sep 22 23:22:14.116 INFO Extent 53 has flush number mismatch, : mend
34167 Sep 22 23:22:14.116 INFO First source client ID for extent 53, mrl: flush_mismatch, : mend
34168 Sep 22 23:22:14.116 INFO extent:53 gens: 0 1 1, mrl: flush_mismatch, : mend
34169 Sep 22 23:22:14.116 INFO extent:53 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34170 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34171 Sep 22 23:22:14.116 INFO extent:53 dirty: false false false, mrl: flush_mismatch, : mend
34172 Sep 22 23:22:14.116 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34173 Sep 22 23:22:14.116 INFO find dest for source 1 for extent at index 53, mrl: flush_mismatch, : mend
34174 Sep 22 23:22:14.116 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34175 Sep 22 23:22:14.116 INFO Extent 54 has flush number mismatch, : mend
34176 Sep 22 23:22:14.116 INFO First source client ID for extent 54, mrl: flush_mismatch, : mend
34177 Sep 22 23:22:14.116 INFO extent:54 gens: 0 1 1, mrl: flush_mismatch, : mend
34178 Sep 22 23:22:14.116 INFO extent:54 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34179 Sep 22 23:22:14.116 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34180 Sep 22 23:22:14.117 INFO extent:54 dirty: false false false, mrl: flush_mismatch, : mend
34181 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34182 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 54, mrl: flush_mismatch, : mend
34183 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34184 Sep 22 23:22:14.117 INFO Extent 55 has flush number mismatch, : mend
34185 Sep 22 23:22:14.117 INFO First source client ID for extent 55, mrl: flush_mismatch, : mend
34186 Sep 22 23:22:14.117 INFO extent:55 gens: 0 1 1, mrl: flush_mismatch, : mend
34187 Sep 22 23:22:14.117 INFO extent:55 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34188 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34189 Sep 22 23:22:14.117 INFO extent:55 dirty: false false false, mrl: flush_mismatch, : mend
34190 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34191 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 55, mrl: flush_mismatch, : mend
34192 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34193 Sep 22 23:22:14.117 INFO Extent 56 has flush number mismatch, : mend
34194 Sep 22 23:22:14.117 INFO First source client ID for extent 56, mrl: flush_mismatch, : mend
34195 Sep 22 23:22:14.117 INFO extent:56 gens: 0 1 1, mrl: flush_mismatch, : mend
34196 Sep 22 23:22:14.117 INFO extent:56 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34197 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34198 Sep 22 23:22:14.117 INFO extent:56 dirty: false false false, mrl: flush_mismatch, : mend
34199 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34200 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 56, mrl: flush_mismatch, : mend
34201 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34202 Sep 22 23:22:14.117 INFO Extent 57 has flush number mismatch, : mend
34203 Sep 22 23:22:14.117 INFO First source client ID for extent 57, mrl: flush_mismatch, : mend
34204 Sep 22 23:22:14.117 INFO extent:57 gens: 0 1 1, mrl: flush_mismatch, : mend
34205 Sep 22 23:22:14.117 INFO extent:57 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34206 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34207 Sep 22 23:22:14.117 INFO extent:57 dirty: false false false, mrl: flush_mismatch, : mend
34208 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34209 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 57, mrl: flush_mismatch, : mend
34210 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34211 Sep 22 23:22:14.117 INFO Extent 58 has flush number mismatch, : mend
34212 Sep 22 23:22:14.117 INFO First source client ID for extent 58, mrl: flush_mismatch, : mend
34213 Sep 22 23:22:14.117 INFO extent:58 gens: 0 1 1, mrl: flush_mismatch, : mend
34214 Sep 22 23:22:14.117 INFO extent:58 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34215 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34216 Sep 22 23:22:14.117 INFO extent:58 dirty: false false false, mrl: flush_mismatch, : mend
34217 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34218 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 58, mrl: flush_mismatch, : mend
34219 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34220 Sep 22 23:22:14.117 INFO Extent 59 has flush number mismatch, : mend
34221 Sep 22 23:22:14.117 INFO First source client ID for extent 59, mrl: flush_mismatch, : mend
34222 Sep 22 23:22:14.117 INFO extent:59 gens: 0 1 1, mrl: flush_mismatch, : mend
34223 Sep 22 23:22:14.117 INFO extent:59 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34224 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34225 Sep 22 23:22:14.117 INFO extent:59 dirty: false false false, mrl: flush_mismatch, : mend
34226 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34227 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 59, mrl: flush_mismatch, : mend
34228 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34229 Sep 22 23:22:14.117 INFO Extent 60 has flush number mismatch, : mend
34230 Sep 22 23:22:14.117 INFO First source client ID for extent 60, mrl: flush_mismatch, : mend
34231 Sep 22 23:22:14.117 INFO extent:60 gens: 0 1 1, mrl: flush_mismatch, : mend
34232 Sep 22 23:22:14.117 INFO extent:60 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34233 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34234 Sep 22 23:22:14.117 INFO extent:60 dirty: false false false, mrl: flush_mismatch, : mend
34235 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34236 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 60, mrl: flush_mismatch, : mend
34237 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34238 Sep 22 23:22:14.117 INFO Extent 61 has flush number mismatch, : mend
34239 Sep 22 23:22:14.117 INFO First source client ID for extent 61, mrl: flush_mismatch, : mend
34240 Sep 22 23:22:14.117 INFO extent:61 gens: 0 1 1, mrl: flush_mismatch, : mend
34241 Sep 22 23:22:14.117 INFO extent:61 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34242 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34243 Sep 22 23:22:14.117 INFO extent:61 dirty: false false false, mrl: flush_mismatch, : mend
34244 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34245 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 61, mrl: flush_mismatch, : mend
34246 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34247 Sep 22 23:22:14.117 INFO Extent 62 has flush number mismatch, : mend
34248 Sep 22 23:22:14.117 INFO First source client ID for extent 62, mrl: flush_mismatch, : mend
34249 Sep 22 23:22:14.117 INFO extent:62 gens: 0 1 1, mrl: flush_mismatch, : mend
34250 Sep 22 23:22:14.117 INFO extent:62 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34251 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34252 Sep 22 23:22:14.117 INFO extent:62 dirty: false false false, mrl: flush_mismatch, : mend
34253 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34254 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 62, mrl: flush_mismatch, : mend
34255 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34256 Sep 22 23:22:14.117 INFO Extent 63 has flush number mismatch, : mend
34257 Sep 22 23:22:14.117 INFO First source client ID for extent 63, mrl: flush_mismatch, : mend
34258 Sep 22 23:22:14.117 INFO extent:63 gens: 0 1 1, mrl: flush_mismatch, : mend
34259 Sep 22 23:22:14.117 INFO extent:63 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34260 Sep 22 23:22:14.117 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34261 Sep 22 23:22:14.117 INFO extent:63 dirty: false false false, mrl: flush_mismatch, : mend
34262 Sep 22 23:22:14.117 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34263 Sep 22 23:22:14.117 INFO find dest for source 1 for extent at index 63, mrl: flush_mismatch, : mend
34264 Sep 22 23:22:14.117 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34265 Sep 22 23:22:14.117 INFO Extent 64 has flush number mismatch, : mend
34266 Sep 22 23:22:14.117 INFO First source client ID for extent 64, mrl: flush_mismatch, : mend
34267 Sep 22 23:22:14.118 INFO extent:64 gens: 0 1 1, mrl: flush_mismatch, : mend
34268 Sep 22 23:22:14.118 INFO extent:64 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34269 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34270 Sep 22 23:22:14.118 INFO extent:64 dirty: false false false, mrl: flush_mismatch, : mend
34271 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34272 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 64, mrl: flush_mismatch, : mend
34273 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34274 Sep 22 23:22:14.118 INFO Extent 65 has flush number mismatch, : mend
34275 Sep 22 23:22:14.118 INFO First source client ID for extent 65, mrl: flush_mismatch, : mend
34276 Sep 22 23:22:14.118 INFO extent:65 gens: 0 1 1, mrl: flush_mismatch, : mend
34277 Sep 22 23:22:14.118 INFO extent:65 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34278 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34279 Sep 22 23:22:14.118 INFO extent:65 dirty: false false false, mrl: flush_mismatch, : mend
34280 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34281 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 65, mrl: flush_mismatch, : mend
34282 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34283 Sep 22 23:22:14.118 INFO Extent 66 has flush number mismatch, : mend
34284 Sep 22 23:22:14.118 INFO First source client ID for extent 66, mrl: flush_mismatch, : mend
34285 Sep 22 23:22:14.118 INFO extent:66 gens: 0 1 1, mrl: flush_mismatch, : mend
34286 Sep 22 23:22:14.118 INFO extent:66 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34287 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34288 Sep 22 23:22:14.118 INFO extent:66 dirty: false false false, mrl: flush_mismatch, : mend
34289 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34290 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 66, mrl: flush_mismatch, : mend
34291 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34292 Sep 22 23:22:14.118 INFO Extent 67 has flush number mismatch, : mend
34293 Sep 22 23:22:14.118 INFO First source client ID for extent 67, mrl: flush_mismatch, : mend
34294 Sep 22 23:22:14.118 INFO extent:67 gens: 0 1 1, mrl: flush_mismatch, : mend
34295 Sep 22 23:22:14.118 INFO extent:67 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34296 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34297 Sep 22 23:22:14.118 INFO extent:67 dirty: false false false, mrl: flush_mismatch, : mend
34298 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34299 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 67, mrl: flush_mismatch, : mend
34300 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34301 Sep 22 23:22:14.118 INFO Extent 68 has flush number mismatch, : mend
34302 Sep 22 23:22:14.118 INFO First source client ID for extent 68, mrl: flush_mismatch, : mend
34303 Sep 22 23:22:14.118 INFO extent:68 gens: 0 1 1, mrl: flush_mismatch, : mend
34304 Sep 22 23:22:14.118 INFO extent:68 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34305 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34306 Sep 22 23:22:14.118 INFO extent:68 dirty: false false false, mrl: flush_mismatch, : mend
34307 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34308 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 68, mrl: flush_mismatch, : mend
34309 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34310 Sep 22 23:22:14.118 INFO Extent 69 has flush number mismatch, : mend
34311 Sep 22 23:22:14.118 INFO First source client ID for extent 69, mrl: flush_mismatch, : mend
34312 Sep 22 23:22:14.118 INFO extent:69 gens: 0 1 1, mrl: flush_mismatch, : mend
34313 Sep 22 23:22:14.118 INFO extent:69 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34314 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34315 Sep 22 23:22:14.118 INFO extent:69 dirty: false false false, mrl: flush_mismatch, : mend
34316 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34317 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 69, mrl: flush_mismatch, : mend
34318 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34319 Sep 22 23:22:14.118 INFO Extent 70 has flush number mismatch, : mend
34320 Sep 22 23:22:14.118 INFO First source client ID for extent 70, mrl: flush_mismatch, : mend
34321 Sep 22 23:22:14.118 INFO extent:70 gens: 0 1 1, mrl: flush_mismatch, : mend
34322 Sep 22 23:22:14.118 INFO extent:70 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34323 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34324 Sep 22 23:22:14.118 INFO extent:70 dirty: false false false, mrl: flush_mismatch, : mend
34325 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34326 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 70, mrl: flush_mismatch, : mend
34327 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34328 Sep 22 23:22:14.118 INFO Extent 71 has flush number mismatch, : mend
34329 Sep 22 23:22:14.118 INFO First source client ID for extent 71, mrl: flush_mismatch, : mend
34330 Sep 22 23:22:14.118 INFO extent:71 gens: 0 1 1, mrl: flush_mismatch, : mend
34331 Sep 22 23:22:14.118 INFO extent:71 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34332 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34333 Sep 22 23:22:14.118 INFO extent:71 dirty: false false false, mrl: flush_mismatch, : mend
34334 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34335 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 71, mrl: flush_mismatch, : mend
34336 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34337 Sep 22 23:22:14.118 INFO Extent 72 has flush number mismatch, : mend
34338 Sep 22 23:22:14.118 INFO First source client ID for extent 72, mrl: flush_mismatch, : mend
34339 Sep 22 23:22:14.118 INFO extent:72 gens: 0 1 1, mrl: flush_mismatch, : mend
34340 Sep 22 23:22:14.118 INFO extent:72 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34341 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34342 Sep 22 23:22:14.118 INFO extent:72 dirty: false false false, mrl: flush_mismatch, : mend
34343 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34344 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 72, mrl: flush_mismatch, : mend
34345 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34346 Sep 22 23:22:14.118 INFO Extent 73 has flush number mismatch, : mend
34347 Sep 22 23:22:14.118 INFO First source client ID for extent 73, mrl: flush_mismatch, : mend
34348 Sep 22 23:22:14.118 INFO extent:73 gens: 0 1 1, mrl: flush_mismatch, : mend
34349 Sep 22 23:22:14.118 INFO extent:73 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34350 Sep 22 23:22:14.118 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34351 Sep 22 23:22:14.118 INFO extent:73 dirty: false false false, mrl: flush_mismatch, : mend
34352 Sep 22 23:22:14.118 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34353 Sep 22 23:22:14.118 INFO find dest for source 1 for extent at index 73, mrl: flush_mismatch, : mend
34354 Sep 22 23:22:14.118 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34355 Sep 22 23:22:14.118 INFO Extent 74 has flush number mismatch, : mend
34356 Sep 22 23:22:14.118 INFO First source client ID for extent 74, mrl: flush_mismatch, : mend
34357 Sep 22 23:22:14.119 INFO extent:74 gens: 0 1 1, mrl: flush_mismatch, : mend
34358 Sep 22 23:22:14.119 INFO extent:74 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34359 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34360 Sep 22 23:22:14.119 INFO extent:74 dirty: false false false, mrl: flush_mismatch, : mend
34361 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34362 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 74, mrl: flush_mismatch, : mend
34363 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34364 Sep 22 23:22:14.119 INFO Extent 75 has flush number mismatch, : mend
34365 Sep 22 23:22:14.119 INFO First source client ID for extent 75, mrl: flush_mismatch, : mend
34366 Sep 22 23:22:14.119 INFO extent:75 gens: 0 1 1, mrl: flush_mismatch, : mend
34367 Sep 22 23:22:14.119 INFO extent:75 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34368 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34369 Sep 22 23:22:14.119 INFO extent:75 dirty: false false false, mrl: flush_mismatch, : mend
34370 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34371 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 75, mrl: flush_mismatch, : mend
34372 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34373 Sep 22 23:22:14.119 INFO Extent 76 has flush number mismatch, : mend
34374 Sep 22 23:22:14.119 INFO First source client ID for extent 76, mrl: flush_mismatch, : mend
34375 Sep 22 23:22:14.119 INFO extent:76 gens: 0 1 1, mrl: flush_mismatch, : mend
34376 Sep 22 23:22:14.119 INFO extent:76 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34377 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34378 Sep 22 23:22:14.119 INFO extent:76 dirty: false false false, mrl: flush_mismatch, : mend
34379 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34380 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 76, mrl: flush_mismatch, : mend
34381 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34382 Sep 22 23:22:14.119 INFO Extent 77 has flush number mismatch, : mend
34383 Sep 22 23:22:14.119 INFO First source client ID for extent 77, mrl: flush_mismatch, : mend
34384 Sep 22 23:22:14.119 INFO extent:77 gens: 0 1 1, mrl: flush_mismatch, : mend
34385 Sep 22 23:22:14.119 INFO extent:77 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34386 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34387 Sep 22 23:22:14.119 INFO extent:77 dirty: false false false, mrl: flush_mismatch, : mend
34388 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34389 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 77, mrl: flush_mismatch, : mend
34390 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34391 Sep 22 23:22:14.119 INFO Extent 78 has flush number mismatch, : mend
34392 Sep 22 23:22:14.119 INFO First source client ID for extent 78, mrl: flush_mismatch, : mend
34393 Sep 22 23:22:14.119 INFO extent:78 gens: 0 1 1, mrl: flush_mismatch, : mend
34394 Sep 22 23:22:14.119 INFO extent:78 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34395 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34396 Sep 22 23:22:14.119 INFO extent:78 dirty: false false false, mrl: flush_mismatch, : mend
34397 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34398 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 78, mrl: flush_mismatch, : mend
34399 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34400 Sep 22 23:22:14.119 INFO Extent 79 has flush number mismatch, : mend
34401 Sep 22 23:22:14.119 INFO First source client ID for extent 79, mrl: flush_mismatch, : mend
34402 Sep 22 23:22:14.119 INFO extent:79 gens: 0 1 1, mrl: flush_mismatch, : mend
34403 Sep 22 23:22:14.119 INFO extent:79 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34404 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34405 Sep 22 23:22:14.119 INFO extent:79 dirty: false false false, mrl: flush_mismatch, : mend
34406 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34407 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 79, mrl: flush_mismatch, : mend
34408 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34409 Sep 22 23:22:14.119 INFO Extent 80 has flush number mismatch, : mend
34410 Sep 22 23:22:14.119 INFO First source client ID for extent 80, mrl: flush_mismatch, : mend
34411 Sep 22 23:22:14.119 INFO extent:80 gens: 0 1 1, mrl: flush_mismatch, : mend
34412 Sep 22 23:22:14.119 INFO extent:80 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34413 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34414 Sep 22 23:22:14.119 INFO extent:80 dirty: false false false, mrl: flush_mismatch, : mend
34415 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34416 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 80, mrl: flush_mismatch, : mend
34417 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34418 Sep 22 23:22:14.119 INFO Extent 81 has flush number mismatch, : mend
34419 Sep 22 23:22:14.119 INFO First source client ID for extent 81, mrl: flush_mismatch, : mend
34420 Sep 22 23:22:14.119 INFO extent:81 gens: 0 1 1, mrl: flush_mismatch, : mend
34421 Sep 22 23:22:14.119 INFO extent:81 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34422 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34423 Sep 22 23:22:14.119 INFO extent:81 dirty: false false false, mrl: flush_mismatch, : mend
34424 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34425 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 81, mrl: flush_mismatch, : mend
34426 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34427 Sep 22 23:22:14.119 INFO Extent 82 has flush number mismatch, : mend
34428 Sep 22 23:22:14.119 INFO First source client ID for extent 82, mrl: flush_mismatch, : mend
34429 Sep 22 23:22:14.119 INFO extent:82 gens: 0 1 1, mrl: flush_mismatch, : mend
34430 Sep 22 23:22:14.119 INFO extent:82 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34431 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34432 Sep 22 23:22:14.119 INFO extent:82 dirty: false false false, mrl: flush_mismatch, : mend
34433 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34434 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 82, mrl: flush_mismatch, : mend
34435 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34436 Sep 22 23:22:14.119 INFO Extent 83 has flush number mismatch, : mend
34437 Sep 22 23:22:14.119 INFO First source client ID for extent 83, mrl: flush_mismatch, : mend
34438 Sep 22 23:22:14.119 INFO extent:83 gens: 0 1 1, mrl: flush_mismatch, : mend
34439 Sep 22 23:22:14.119 INFO extent:83 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34440 Sep 22 23:22:14.119 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34441 Sep 22 23:22:14.119 INFO extent:83 dirty: false false false, mrl: flush_mismatch, : mend
34442 Sep 22 23:22:14.119 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34443 Sep 22 23:22:14.119 INFO find dest for source 1 for extent at index 83, mrl: flush_mismatch, : mend
34444 Sep 22 23:22:14.119 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34445 Sep 22 23:22:14.119 INFO Extent 84 has flush number mismatch, : mend
34446 Sep 22 23:22:14.119 INFO First source client ID for extent 84, mrl: flush_mismatch, : mend
34447 Sep 22 23:22:14.119 INFO extent:84 gens: 0 1 1, mrl: flush_mismatch, : mend
34448 Sep 22 23:22:14.119 INFO extent:84 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34449 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34450 Sep 22 23:22:14.120 INFO extent:84 dirty: false false false, mrl: flush_mismatch, : mend
34451 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34452 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 84, mrl: flush_mismatch, : mend
34453 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34454 Sep 22 23:22:14.120 INFO Extent 85 has flush number mismatch, : mend
34455 Sep 22 23:22:14.120 INFO First source client ID for extent 85, mrl: flush_mismatch, : mend
34456 Sep 22 23:22:14.120 INFO extent:85 gens: 0 1 1, mrl: flush_mismatch, : mend
34457 Sep 22 23:22:14.120 INFO extent:85 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34458 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34459 Sep 22 23:22:14.120 INFO extent:85 dirty: false false false, mrl: flush_mismatch, : mend
34460 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34461 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 85, mrl: flush_mismatch, : mend
34462 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34463 Sep 22 23:22:14.120 INFO Extent 86 has flush number mismatch, : mend
34464 Sep 22 23:22:14.120 INFO First source client ID for extent 86, mrl: flush_mismatch, : mend
34465 Sep 22 23:22:14.120 INFO extent:86 gens: 0 1 1, mrl: flush_mismatch, : mend
34466 Sep 22 23:22:14.120 INFO extent:86 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34467 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34468 Sep 22 23:22:14.120 INFO extent:86 dirty: false false false, mrl: flush_mismatch, : mend
34469 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34470 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 86, mrl: flush_mismatch, : mend
34471 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34472 Sep 22 23:22:14.120 INFO Extent 87 has flush number mismatch, : mend
34473 Sep 22 23:22:14.120 INFO First source client ID for extent 87, mrl: flush_mismatch, : mend
34474 Sep 22 23:22:14.120 INFO extent:87 gens: 0 1 1, mrl: flush_mismatch, : mend
34475 Sep 22 23:22:14.120 INFO extent:87 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34476 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34477 Sep 22 23:22:14.120 INFO extent:87 dirty: false false false, mrl: flush_mismatch, : mend
34478 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34479 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 87, mrl: flush_mismatch, : mend
34480 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34481 Sep 22 23:22:14.120 INFO Extent 88 has flush number mismatch, : mend
34482 Sep 22 23:22:14.120 INFO First source client ID for extent 88, mrl: flush_mismatch, : mend
34483 Sep 22 23:22:14.120 INFO extent:88 gens: 0 1 1, mrl: flush_mismatch, : mend
34484 Sep 22 23:22:14.120 INFO extent:88 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34485 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34486 Sep 22 23:22:14.120 INFO extent:88 dirty: false false false, mrl: flush_mismatch, : mend
34487 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34488 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 88, mrl: flush_mismatch, : mend
34489 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34490 Sep 22 23:22:14.120 INFO Extent 89 has flush number mismatch, : mend
34491 Sep 22 23:22:14.120 INFO First source client ID for extent 89, mrl: flush_mismatch, : mend
34492 Sep 22 23:22:14.120 INFO extent:89 gens: 0 1 1, mrl: flush_mismatch, : mend
34493 Sep 22 23:22:14.120 INFO extent:89 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34494 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34495 Sep 22 23:22:14.120 INFO extent:89 dirty: false false false, mrl: flush_mismatch, : mend
34496 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34497 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 89, mrl: flush_mismatch, : mend
34498 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34499 Sep 22 23:22:14.120 INFO Extent 90 has flush number mismatch, : mend
34500 Sep 22 23:22:14.120 INFO First source client ID for extent 90, mrl: flush_mismatch, : mend
34501 Sep 22 23:22:14.120 INFO extent:90 gens: 0 1 1, mrl: flush_mismatch, : mend
34502 Sep 22 23:22:14.120 INFO extent:90 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34503 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34504 Sep 22 23:22:14.120 INFO extent:90 dirty: false false false, mrl: flush_mismatch, : mend
34505 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34506 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 90, mrl: flush_mismatch, : mend
34507 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34508 Sep 22 23:22:14.120 INFO Extent 91 has flush number mismatch, : mend
34509 Sep 22 23:22:14.120 INFO First source client ID for extent 91, mrl: flush_mismatch, : mend
34510 Sep 22 23:22:14.120 INFO extent:91 gens: 0 1 1, mrl: flush_mismatch, : mend
34511 Sep 22 23:22:14.120 INFO extent:91 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34512 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34513 Sep 22 23:22:14.120 INFO extent:91 dirty: false false false, mrl: flush_mismatch, : mend
34514 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34515 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 91, mrl: flush_mismatch, : mend
34516 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34517 Sep 22 23:22:14.120 INFO Extent 92 has flush number mismatch, : mend
34518 Sep 22 23:22:14.120 INFO First source client ID for extent 92, mrl: flush_mismatch, : mend
34519 Sep 22 23:22:14.120 INFO extent:92 gens: 0 1 1, mrl: flush_mismatch, : mend
34520 Sep 22 23:22:14.120 INFO extent:92 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34521 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34522 Sep 22 23:22:14.120 INFO extent:92 dirty: false false false, mrl: flush_mismatch, : mend
34523 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34524 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 92, mrl: flush_mismatch, : mend
34525 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34526 Sep 22 23:22:14.120 INFO Extent 93 has flush number mismatch, : mend
34527 Sep 22 23:22:14.120 INFO First source client ID for extent 93, mrl: flush_mismatch, : mend
34528 Sep 22 23:22:14.120 INFO extent:93 gens: 0 1 1, mrl: flush_mismatch, : mend
34529 Sep 22 23:22:14.120 INFO extent:93 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34530 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34531 Sep 22 23:22:14.120 INFO extent:93 dirty: false false false, mrl: flush_mismatch, : mend
34532 Sep 22 23:22:14.120 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34533 Sep 22 23:22:14.120 INFO find dest for source 1 for extent at index 93, mrl: flush_mismatch, : mend
34534 Sep 22 23:22:14.120 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34535 Sep 22 23:22:14.120 INFO Extent 94 has flush number mismatch, : mend
34536 Sep 22 23:22:14.120 INFO First source client ID for extent 94, mrl: flush_mismatch, : mend
34537 Sep 22 23:22:14.120 INFO extent:94 gens: 0 1 1, mrl: flush_mismatch, : mend
34538 Sep 22 23:22:14.120 INFO extent:94 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34539 Sep 22 23:22:14.120 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34540 Sep 22 23:22:14.121 INFO extent:94 dirty: false false false, mrl: flush_mismatch, : mend
34541 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34542 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 94, mrl: flush_mismatch, : mend
34543 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34544 Sep 22 23:22:14.121 INFO Extent 95 has flush number mismatch, : mend
34545 Sep 22 23:22:14.121 INFO First source client ID for extent 95, mrl: flush_mismatch, : mend
34546 Sep 22 23:22:14.121 INFO extent:95 gens: 0 1 1, mrl: flush_mismatch, : mend
34547 Sep 22 23:22:14.121 INFO extent:95 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34548 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34549 Sep 22 23:22:14.121 INFO extent:95 dirty: false false false, mrl: flush_mismatch, : mend
34550 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34551 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 95, mrl: flush_mismatch, : mend
34552 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34553 Sep 22 23:22:14.121 INFO Extent 96 has flush number mismatch, : mend
34554 Sep 22 23:22:14.121 INFO First source client ID for extent 96, mrl: flush_mismatch, : mend
34555 Sep 22 23:22:14.121 INFO extent:96 gens: 0 1 1, mrl: flush_mismatch, : mend
34556 Sep 22 23:22:14.121 INFO extent:96 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34557 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34558 Sep 22 23:22:14.121 INFO extent:96 dirty: false false false, mrl: flush_mismatch, : mend
34559 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34560 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 96, mrl: flush_mismatch, : mend
34561 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34562 Sep 22 23:22:14.121 INFO Extent 97 has flush number mismatch, : mend
34563 Sep 22 23:22:14.121 INFO First source client ID for extent 97, mrl: flush_mismatch, : mend
34564 Sep 22 23:22:14.121 INFO extent:97 gens: 0 1 1, mrl: flush_mismatch, : mend
34565 Sep 22 23:22:14.121 INFO extent:97 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34566 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34567 Sep 22 23:22:14.121 INFO extent:97 dirty: false false false, mrl: flush_mismatch, : mend
34568 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34569 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 97, mrl: flush_mismatch, : mend
34570 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34571 Sep 22 23:22:14.121 INFO Extent 98 has flush number mismatch, : mend
34572 Sep 22 23:22:14.121 INFO First source client ID for extent 98, mrl: flush_mismatch, : mend
34573 Sep 22 23:22:14.121 INFO extent:98 gens: 0 1 1, mrl: flush_mismatch, : mend
34574 Sep 22 23:22:14.121 INFO extent:98 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34575 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34576 Sep 22 23:22:14.121 INFO extent:98 dirty: false false false, mrl: flush_mismatch, : mend
34577 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34578 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 98, mrl: flush_mismatch, : mend
34579 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34580 Sep 22 23:22:14.121 INFO Extent 99 has flush number mismatch, : mend
34581 Sep 22 23:22:14.121 INFO First source client ID for extent 99, mrl: flush_mismatch, : mend
34582 Sep 22 23:22:14.121 INFO extent:99 gens: 0 1 1, mrl: flush_mismatch, : mend
34583 Sep 22 23:22:14.121 INFO extent:99 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34584 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34585 Sep 22 23:22:14.121 INFO extent:99 dirty: false false false, mrl: flush_mismatch, : mend
34586 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34587 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 99, mrl: flush_mismatch, : mend
34588 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34589 Sep 22 23:22:14.121 INFO Extent 100 has flush number mismatch, : mend
34590 Sep 22 23:22:14.121 INFO First source client ID for extent 100, mrl: flush_mismatch, : mend
34591 Sep 22 23:22:14.121 INFO extent:100 gens: 0 1 1, mrl: flush_mismatch, : mend
34592 Sep 22 23:22:14.121 INFO extent:100 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34593 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34594 Sep 22 23:22:14.121 INFO extent:100 dirty: false false false, mrl: flush_mismatch, : mend
34595 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34596 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 100, mrl: flush_mismatch, : mend
34597 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34598 Sep 22 23:22:14.121 INFO Extent 101 has flush number mismatch, : mend
34599 Sep 22 23:22:14.121 INFO First source client ID for extent 101, mrl: flush_mismatch, : mend
34600 Sep 22 23:22:14.121 INFO extent:101 gens: 0 1 1, mrl: flush_mismatch, : mend
34601 Sep 22 23:22:14.121 INFO extent:101 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34602 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34603 Sep 22 23:22:14.121 INFO extent:101 dirty: false false false, mrl: flush_mismatch, : mend
34604 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34605 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 101, mrl: flush_mismatch, : mend
34606 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34607 Sep 22 23:22:14.121 INFO Extent 102 has flush number mismatch, : mend
34608 Sep 22 23:22:14.121 INFO First source client ID for extent 102, mrl: flush_mismatch, : mend
34609 Sep 22 23:22:14.121 INFO extent:102 gens: 0 1 1, mrl: flush_mismatch, : mend
34610 Sep 22 23:22:14.121 INFO extent:102 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34611 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34612 Sep 22 23:22:14.121 INFO extent:102 dirty: false false false, mrl: flush_mismatch, : mend
34613 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34614 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 102, mrl: flush_mismatch, : mend
34615 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34616 Sep 22 23:22:14.121 INFO Extent 103 has flush number mismatch, : mend
34617 Sep 22 23:22:14.121 INFO First source client ID for extent 103, mrl: flush_mismatch, : mend
34618 Sep 22 23:22:14.121 INFO extent:103 gens: 0 1 1, mrl: flush_mismatch, : mend
34619 Sep 22 23:22:14.121 INFO extent:103 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34620 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34621 Sep 22 23:22:14.121 INFO extent:103 dirty: false false false, mrl: flush_mismatch, : mend
34622 Sep 22 23:22:14.121 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34623 Sep 22 23:22:14.121 INFO find dest for source 1 for extent at index 103, mrl: flush_mismatch, : mend
34624 Sep 22 23:22:14.121 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34625 Sep 22 23:22:14.121 INFO Extent 104 has flush number mismatch, : mend
34626 Sep 22 23:22:14.121 INFO First source client ID for extent 104, mrl: flush_mismatch, : mend
34627 Sep 22 23:22:14.121 INFO extent:104 gens: 0 1 1, mrl: flush_mismatch, : mend
34628 Sep 22 23:22:14.121 INFO extent:104 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34629 Sep 22 23:22:14.121 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34630 Sep 22 23:22:14.121 INFO extent:104 dirty: false false false, mrl: flush_mismatch, : mend
34631 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34632 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 104, mrl: flush_mismatch, : mend
34633 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34634 Sep 22 23:22:14.122 INFO Extent 105 has flush number mismatch, : mend
34635 Sep 22 23:22:14.122 INFO First source client ID for extent 105, mrl: flush_mismatch, : mend
34636 Sep 22 23:22:14.122 INFO extent:105 gens: 0 1 1, mrl: flush_mismatch, : mend
34637 Sep 22 23:22:14.122 INFO extent:105 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34638 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34639 Sep 22 23:22:14.122 INFO extent:105 dirty: false false false, mrl: flush_mismatch, : mend
34640 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34641 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 105, mrl: flush_mismatch, : mend
34642 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34643 Sep 22 23:22:14.122 INFO Extent 106 has flush number mismatch, : mend
34644 Sep 22 23:22:14.122 INFO First source client ID for extent 106, mrl: flush_mismatch, : mend
34645 Sep 22 23:22:14.122 INFO extent:106 gens: 0 1 1, mrl: flush_mismatch, : mend
34646 Sep 22 23:22:14.122 INFO extent:106 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34647 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34648 Sep 22 23:22:14.122 INFO extent:106 dirty: false false false, mrl: flush_mismatch, : mend
34649 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34650 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 106, mrl: flush_mismatch, : mend
34651 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34652 Sep 22 23:22:14.122 INFO Extent 107 has flush number mismatch, : mend
34653 Sep 22 23:22:14.122 INFO First source client ID for extent 107, mrl: flush_mismatch, : mend
34654 Sep 22 23:22:14.122 INFO extent:107 gens: 0 1 1, mrl: flush_mismatch, : mend
34655 Sep 22 23:22:14.122 INFO extent:107 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34656 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34657 Sep 22 23:22:14.122 INFO extent:107 dirty: false false false, mrl: flush_mismatch, : mend
34658 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34659 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 107, mrl: flush_mismatch, : mend
34660 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34661 Sep 22 23:22:14.122 INFO Extent 108 has flush number mismatch, : mend
34662 Sep 22 23:22:14.122 INFO First source client ID for extent 108, mrl: flush_mismatch, : mend
34663 Sep 22 23:22:14.122 INFO extent:108 gens: 0 1 1, mrl: flush_mismatch, : mend
34664 Sep 22 23:22:14.122 INFO extent:108 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34665 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34666 Sep 22 23:22:14.122 INFO extent:108 dirty: false false false, mrl: flush_mismatch, : mend
34667 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34668 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 108, mrl: flush_mismatch, : mend
34669 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34670 Sep 22 23:22:14.122 INFO Extent 109 has flush number mismatch, : mend
34671 Sep 22 23:22:14.122 INFO First source client ID for extent 109, mrl: flush_mismatch, : mend
34672 Sep 22 23:22:14.122 INFO extent:109 gens: 0 1 1, mrl: flush_mismatch, : mend
34673 Sep 22 23:22:14.122 INFO extent:109 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34674 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34675 Sep 22 23:22:14.122 INFO extent:109 dirty: false false false, mrl: flush_mismatch, : mend
34676 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34677 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 109, mrl: flush_mismatch, : mend
34678 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34679 Sep 22 23:22:14.122 INFO Extent 110 has flush number mismatch, : mend
34680 Sep 22 23:22:14.122 INFO First source client ID for extent 110, mrl: flush_mismatch, : mend
34681 Sep 22 23:22:14.122 INFO extent:110 gens: 0 1 1, mrl: flush_mismatch, : mend
34682 Sep 22 23:22:14.122 INFO extent:110 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34683 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34684 Sep 22 23:22:14.122 INFO extent:110 dirty: false false false, mrl: flush_mismatch, : mend
34685 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34686 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 110, mrl: flush_mismatch, : mend
34687 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34688 Sep 22 23:22:14.122 INFO Extent 111 has flush number mismatch, : mend
34689 Sep 22 23:22:14.122 INFO First source client ID for extent 111, mrl: flush_mismatch, : mend
34690 Sep 22 23:22:14.122 INFO extent:111 gens: 0 1 1, mrl: flush_mismatch, : mend
34691 Sep 22 23:22:14.122 INFO extent:111 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34692 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34693 Sep 22 23:22:14.122 INFO extent:111 dirty: false false false, mrl: flush_mismatch, : mend
34694 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34695 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 111, mrl: flush_mismatch, : mend
34696 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34697 Sep 22 23:22:14.122 INFO Extent 112 has flush number mismatch, : mend
34698 Sep 22 23:22:14.122 INFO First source client ID for extent 112, mrl: flush_mismatch, : mend
34699 Sep 22 23:22:14.122 INFO extent:112 gens: 0 1 1, mrl: flush_mismatch, : mend
34700 Sep 22 23:22:14.122 INFO extent:112 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34701 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34702 Sep 22 23:22:14.122 INFO extent:112 dirty: false false false, mrl: flush_mismatch, : mend
34703 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34704 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 112, mrl: flush_mismatch, : mend
34705 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34706 Sep 22 23:22:14.122 INFO Extent 113 has flush number mismatch, : mend
34707 Sep 22 23:22:14.122 INFO First source client ID for extent 113, mrl: flush_mismatch, : mend
34708 Sep 22 23:22:14.122 INFO extent:113 gens: 0 1 1, mrl: flush_mismatch, : mend
34709 Sep 22 23:22:14.122 INFO extent:113 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34710 Sep 22 23:22:14.122 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34711 Sep 22 23:22:14.122 INFO extent:113 dirty: false false false, mrl: flush_mismatch, : mend
34712 Sep 22 23:22:14.122 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34713 Sep 22 23:22:14.122 INFO find dest for source 1 for extent at index 113, mrl: flush_mismatch, : mend
34714 Sep 22 23:22:14.122 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34715 Sep 22 23:22:14.122 INFO Extent 114 has flush number mismatch, : mend
34716 Sep 22 23:22:14.122 INFO First source client ID for extent 114, mrl: flush_mismatch, : mend
34717 Sep 22 23:22:14.122 INFO extent:114 gens: 0 1 1, mrl: flush_mismatch, : mend
34718 Sep 22 23:22:14.123 INFO extent:114 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34719 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34720 Sep 22 23:22:14.123 INFO extent:114 dirty: false false false, mrl: flush_mismatch, : mend
34721 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34722 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 114, mrl: flush_mismatch, : mend
34723 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34724 Sep 22 23:22:14.123 INFO Extent 115 has flush number mismatch, : mend
34725 Sep 22 23:22:14.123 INFO First source client ID for extent 115, mrl: flush_mismatch, : mend
34726 Sep 22 23:22:14.123 INFO extent:115 gens: 0 1 1, mrl: flush_mismatch, : mend
34727 Sep 22 23:22:14.123 INFO extent:115 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34728 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34729 Sep 22 23:22:14.123 INFO extent:115 dirty: false false false, mrl: flush_mismatch, : mend
34730 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34731 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 115, mrl: flush_mismatch, : mend
34732 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34733 Sep 22 23:22:14.123 INFO Extent 116 has flush number mismatch, : mend
34734 Sep 22 23:22:14.123 INFO First source client ID for extent 116, mrl: flush_mismatch, : mend
34735 Sep 22 23:22:14.123 INFO extent:116 gens: 0 1 1, mrl: flush_mismatch, : mend
34736 Sep 22 23:22:14.123 INFO extent:116 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34737 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34738 Sep 22 23:22:14.123 INFO extent:116 dirty: false false false, mrl: flush_mismatch, : mend
34739 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34740 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 116, mrl: flush_mismatch, : mend
34741 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34742 Sep 22 23:22:14.123 INFO Extent 117 has flush number mismatch, : mend
34743 Sep 22 23:22:14.123 INFO First source client ID for extent 117, mrl: flush_mismatch, : mend
34744 Sep 22 23:22:14.123 INFO extent:117 gens: 0 1 1, mrl: flush_mismatch, : mend
34745 Sep 22 23:22:14.123 INFO extent:117 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34746 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34747 Sep 22 23:22:14.123 INFO extent:117 dirty: false false false, mrl: flush_mismatch, : mend
34748 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34749 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 117, mrl: flush_mismatch, : mend
34750 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34751 Sep 22 23:22:14.123 INFO Extent 118 has flush number mismatch, : mend
34752 Sep 22 23:22:14.123 INFO First source client ID for extent 118, mrl: flush_mismatch, : mend
34753 Sep 22 23:22:14.123 INFO extent:118 gens: 0 1 1, mrl: flush_mismatch, : mend
34754 Sep 22 23:22:14.123 INFO extent:118 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34755 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34756 Sep 22 23:22:14.123 INFO extent:118 dirty: false false false, mrl: flush_mismatch, : mend
34757 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34758 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 118, mrl: flush_mismatch, : mend
34759 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34760 Sep 22 23:22:14.123 INFO Extent 119 has flush number mismatch, : mend
34761 Sep 22 23:22:14.123 INFO First source client ID for extent 119, mrl: flush_mismatch, : mend
34762 Sep 22 23:22:14.123 INFO extent:119 gens: 0 1 1, mrl: flush_mismatch, : mend
34763 Sep 22 23:22:14.123 INFO extent:119 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34764 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34765 Sep 22 23:22:14.123 INFO extent:119 dirty: false false false, mrl: flush_mismatch, : mend
34766 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34767 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 119, mrl: flush_mismatch, : mend
34768 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34769 Sep 22 23:22:14.123 INFO Extent 120 has flush number mismatch, : mend
34770 Sep 22 23:22:14.123 INFO First source client ID for extent 120, mrl: flush_mismatch, : mend
34771 Sep 22 23:22:14.123 INFO extent:120 gens: 0 1 1, mrl: flush_mismatch, : mend
34772 Sep 22 23:22:14.123 INFO extent:120 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34773 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34774 Sep 22 23:22:14.123 INFO extent:120 dirty: false false false, mrl: flush_mismatch, : mend
34775 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34776 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 120, mrl: flush_mismatch, : mend
34777 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34778 Sep 22 23:22:14.123 INFO Extent 121 has flush number mismatch, : mend
34779 Sep 22 23:22:14.123 INFO First source client ID for extent 121, mrl: flush_mismatch, : mend
34780 Sep 22 23:22:14.123 INFO extent:121 gens: 0 1 1, mrl: flush_mismatch, : mend
34781 Sep 22 23:22:14.123 INFO extent:121 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34782 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34783 Sep 22 23:22:14.123 INFO extent:121 dirty: false false false, mrl: flush_mismatch, : mend
34784 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34785 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 121, mrl: flush_mismatch, : mend
34786 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34787 Sep 22 23:22:14.123 INFO Extent 122 has flush number mismatch, : mend
34788 Sep 22 23:22:14.123 INFO First source client ID for extent 122, mrl: flush_mismatch, : mend
34789 Sep 22 23:22:14.123 INFO extent:122 gens: 0 1 1, mrl: flush_mismatch, : mend
34790 Sep 22 23:22:14.123 INFO extent:122 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34791 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34792 Sep 22 23:22:14.123 INFO extent:122 dirty: false false false, mrl: flush_mismatch, : mend
34793 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34794 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 122, mrl: flush_mismatch, : mend
34795 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34796 Sep 22 23:22:14.123 INFO Extent 123 has flush number mismatch, : mend
34797 Sep 22 23:22:14.123 INFO First source client ID for extent 123, mrl: flush_mismatch, : mend
34798 Sep 22 23:22:14.123 INFO extent:123 gens: 0 1 1, mrl: flush_mismatch, : mend
34799 Sep 22 23:22:14.123 INFO extent:123 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34800 Sep 22 23:22:14.123 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34801 Sep 22 23:22:14.123 INFO extent:123 dirty: false false false, mrl: flush_mismatch, : mend
34802 Sep 22 23:22:14.123 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34803 Sep 22 23:22:14.123 INFO find dest for source 1 for extent at index 123, mrl: flush_mismatch, : mend
34804 Sep 22 23:22:14.123 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34805 Sep 22 23:22:14.123 INFO Extent 124 has flush number mismatch, : mend
34806 Sep 22 23:22:14.123 INFO First source client ID for extent 124, mrl: flush_mismatch, : mend
34807 Sep 22 23:22:14.123 INFO extent:124 gens: 0 1 1, mrl: flush_mismatch, : mend
34808 Sep 22 23:22:14.123 INFO extent:124 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34809 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34810 Sep 22 23:22:14.124 INFO extent:124 dirty: false false false, mrl: flush_mismatch, : mend
34811 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34812 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 124, mrl: flush_mismatch, : mend
34813 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34814 Sep 22 23:22:14.124 INFO Extent 125 has flush number mismatch, : mend
34815 Sep 22 23:22:14.124 INFO First source client ID for extent 125, mrl: flush_mismatch, : mend
34816 Sep 22 23:22:14.124 INFO extent:125 gens: 0 1 1, mrl: flush_mismatch, : mend
34817 Sep 22 23:22:14.124 INFO extent:125 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34818 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34819 Sep 22 23:22:14.124 INFO extent:125 dirty: false false false, mrl: flush_mismatch, : mend
34820 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34821 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 125, mrl: flush_mismatch, : mend
34822 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34823 Sep 22 23:22:14.124 INFO Extent 126 has flush number mismatch, : mend
34824 Sep 22 23:22:14.124 INFO First source client ID for extent 126, mrl: flush_mismatch, : mend
34825 Sep 22 23:22:14.124 INFO extent:126 gens: 0 1 1, mrl: flush_mismatch, : mend
34826 Sep 22 23:22:14.124 INFO extent:126 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34827 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34828 Sep 22 23:22:14.124 INFO extent:126 dirty: false false false, mrl: flush_mismatch, : mend
34829 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34830 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 126, mrl: flush_mismatch, : mend
34831 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34832 Sep 22 23:22:14.124 INFO Extent 127 has flush number mismatch, : mend
34833 Sep 22 23:22:14.124 INFO First source client ID for extent 127, mrl: flush_mismatch, : mend
34834 Sep 22 23:22:14.124 INFO extent:127 gens: 0 1 1, mrl: flush_mismatch, : mend
34835 Sep 22 23:22:14.124 INFO extent:127 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34836 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34837 Sep 22 23:22:14.124 INFO extent:127 dirty: false false false, mrl: flush_mismatch, : mend
34838 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34839 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 127, mrl: flush_mismatch, : mend
34840 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34841 Sep 22 23:22:14.124 INFO Extent 128 has flush number mismatch, : mend
34842 Sep 22 23:22:14.124 INFO First source client ID for extent 128, mrl: flush_mismatch, : mend
34843 Sep 22 23:22:14.124 INFO extent:128 gens: 0 1 1, mrl: flush_mismatch, : mend
34844 Sep 22 23:22:14.124 INFO extent:128 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34845 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34846 Sep 22 23:22:14.124 INFO extent:128 dirty: false false false, mrl: flush_mismatch, : mend
34847 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34848 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 128, mrl: flush_mismatch, : mend
34849 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34850 Sep 22 23:22:14.124 INFO Extent 129 has flush number mismatch, : mend
34851 Sep 22 23:22:14.124 INFO First source client ID for extent 129, mrl: flush_mismatch, : mend
34852 Sep 22 23:22:14.124 INFO extent:129 gens: 0 1 1, mrl: flush_mismatch, : mend
34853 Sep 22 23:22:14.124 INFO extent:129 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34854 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34855 Sep 22 23:22:14.124 INFO extent:129 dirty: false false false, mrl: flush_mismatch, : mend
34856 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34857 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 129, mrl: flush_mismatch, : mend
34858 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34859 Sep 22 23:22:14.124 INFO Extent 130 has flush number mismatch, : mend
34860 Sep 22 23:22:14.124 INFO First source client ID for extent 130, mrl: flush_mismatch, : mend
34861 Sep 22 23:22:14.124 INFO extent:130 gens: 0 1 1, mrl: flush_mismatch, : mend
34862 Sep 22 23:22:14.124 INFO extent:130 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34863 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34864 Sep 22 23:22:14.124 INFO extent:130 dirty: false false false, mrl: flush_mismatch, : mend
34865 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34866 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 130, mrl: flush_mismatch, : mend
34867 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34868 Sep 22 23:22:14.124 INFO Extent 131 has flush number mismatch, : mend
34869 Sep 22 23:22:14.124 INFO First source client ID for extent 131, mrl: flush_mismatch, : mend
34870 Sep 22 23:22:14.124 INFO extent:131 gens: 0 1 1, mrl: flush_mismatch, : mend
34871 Sep 22 23:22:14.124 INFO extent:131 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34872 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34873 Sep 22 23:22:14.124 INFO extent:131 dirty: false false false, mrl: flush_mismatch, : mend
34874 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34875 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 131, mrl: flush_mismatch, : mend
34876 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34877 Sep 22 23:22:14.124 INFO Extent 132 has flush number mismatch, : mend
34878 Sep 22 23:22:14.124 INFO First source client ID for extent 132, mrl: flush_mismatch, : mend
34879 Sep 22 23:22:14.124 INFO extent:132 gens: 0 1 1, mrl: flush_mismatch, : mend
34880 Sep 22 23:22:14.124 INFO extent:132 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34881 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34882 Sep 22 23:22:14.124 INFO extent:132 dirty: false false false, mrl: flush_mismatch, : mend
34883 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34884 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 132, mrl: flush_mismatch, : mend
34885 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34886 Sep 22 23:22:14.124 INFO Extent 133 has flush number mismatch, : mend
34887 Sep 22 23:22:14.124 INFO First source client ID for extent 133, mrl: flush_mismatch, : mend
34888 Sep 22 23:22:14.124 INFO extent:133 gens: 0 1 1, mrl: flush_mismatch, : mend
34889 Sep 22 23:22:14.124 INFO extent:133 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34890 Sep 22 23:22:14.124 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34891 Sep 22 23:22:14.124 INFO extent:133 dirty: false false false, mrl: flush_mismatch, : mend
34892 Sep 22 23:22:14.124 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34893 Sep 22 23:22:14.124 INFO find dest for source 1 for extent at index 133, mrl: flush_mismatch, : mend
34894 Sep 22 23:22:14.124 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34895 Sep 22 23:22:14.124 INFO Extent 134 has flush number mismatch, : mend
34896 Sep 22 23:22:14.124 INFO First source client ID for extent 134, mrl: flush_mismatch, : mend
34897 Sep 22 23:22:14.124 INFO extent:134 gens: 0 1 1, mrl: flush_mismatch, : mend
34898 Sep 22 23:22:14.124 INFO extent:134 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34899 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34900 Sep 22 23:22:14.125 INFO extent:134 dirty: false false false, mrl: flush_mismatch, : mend
34901 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34902 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 134, mrl: flush_mismatch, : mend
34903 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34904 Sep 22 23:22:14.125 INFO Extent 135 has flush number mismatch, : mend
34905 Sep 22 23:22:14.125 INFO First source client ID for extent 135, mrl: flush_mismatch, : mend
34906 Sep 22 23:22:14.125 INFO extent:135 gens: 0 1 1, mrl: flush_mismatch, : mend
34907 Sep 22 23:22:14.125 INFO extent:135 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34908 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34909 Sep 22 23:22:14.125 INFO extent:135 dirty: false false false, mrl: flush_mismatch, : mend
34910 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34911 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 135, mrl: flush_mismatch, : mend
34912 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34913 Sep 22 23:22:14.125 INFO Extent 136 has flush number mismatch, : mend
34914 Sep 22 23:22:14.125 INFO First source client ID for extent 136, mrl: flush_mismatch, : mend
34915 Sep 22 23:22:14.125 INFO extent:136 gens: 0 1 1, mrl: flush_mismatch, : mend
34916 Sep 22 23:22:14.125 INFO extent:136 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34917 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34918 Sep 22 23:22:14.125 INFO extent:136 dirty: false false false, mrl: flush_mismatch, : mend
34919 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34920 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 136, mrl: flush_mismatch, : mend
34921 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34922 Sep 22 23:22:14.125 INFO Extent 137 has flush number mismatch, : mend
34923 Sep 22 23:22:14.125 INFO First source client ID for extent 137, mrl: flush_mismatch, : mend
34924 Sep 22 23:22:14.125 INFO extent:137 gens: 0 1 1, mrl: flush_mismatch, : mend
34925 Sep 22 23:22:14.125 INFO extent:137 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34926 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34927 Sep 22 23:22:14.125 INFO extent:137 dirty: false false false, mrl: flush_mismatch, : mend
34928 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34929 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 137, mrl: flush_mismatch, : mend
34930 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34931 Sep 22 23:22:14.125 INFO Extent 138 has flush number mismatch, : mend
34932 Sep 22 23:22:14.125 INFO First source client ID for extent 138, mrl: flush_mismatch, : mend
34933 Sep 22 23:22:14.125 INFO extent:138 gens: 0 1 1, mrl: flush_mismatch, : mend
34934 Sep 22 23:22:14.125 INFO extent:138 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34935 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34936 Sep 22 23:22:14.125 INFO extent:138 dirty: false false false, mrl: flush_mismatch, : mend
34937 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34938 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 138, mrl: flush_mismatch, : mend
34939 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34940 Sep 22 23:22:14.125 INFO Extent 139 has flush number mismatch, : mend
34941 Sep 22 23:22:14.125 INFO First source client ID for extent 139, mrl: flush_mismatch, : mend
34942 Sep 22 23:22:14.125 INFO extent:139 gens: 0 1 1, mrl: flush_mismatch, : mend
34943 Sep 22 23:22:14.125 INFO extent:139 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34944 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34945 Sep 22 23:22:14.125 INFO extent:139 dirty: false false false, mrl: flush_mismatch, : mend
34946 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34947 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 139, mrl: flush_mismatch, : mend
34948 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34949 Sep 22 23:22:14.125 INFO Extent 140 has flush number mismatch, : mend
34950 Sep 22 23:22:14.125 INFO First source client ID for extent 140, mrl: flush_mismatch, : mend
34951 Sep 22 23:22:14.125 INFO extent:140 gens: 0 1 1, mrl: flush_mismatch, : mend
34952 Sep 22 23:22:14.125 INFO extent:140 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34953 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34954 Sep 22 23:22:14.125 INFO extent:140 dirty: false false false, mrl: flush_mismatch, : mend
34955 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34956 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 140, mrl: flush_mismatch, : mend
34957 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34958 Sep 22 23:22:14.125 INFO Extent 141 has flush number mismatch, : mend
34959 Sep 22 23:22:14.125 INFO First source client ID for extent 141, mrl: flush_mismatch, : mend
34960 Sep 22 23:22:14.125 INFO extent:141 gens: 0 1 1, mrl: flush_mismatch, : mend
34961 Sep 22 23:22:14.125 INFO extent:141 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34962 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34963 Sep 22 23:22:14.125 INFO extent:141 dirty: false false false, mrl: flush_mismatch, : mend
34964 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34965 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 141, mrl: flush_mismatch, : mend
34966 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34967 Sep 22 23:22:14.125 INFO Extent 142 has flush number mismatch, : mend
34968 Sep 22 23:22:14.125 INFO First source client ID for extent 142, mrl: flush_mismatch, : mend
34969 Sep 22 23:22:14.125 INFO extent:142 gens: 0 1 1, mrl: flush_mismatch, : mend
34970 Sep 22 23:22:14.125 INFO extent:142 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34971 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34972 Sep 22 23:22:14.125 INFO extent:142 dirty: false false false, mrl: flush_mismatch, : mend
34973 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34974 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 142, mrl: flush_mismatch, : mend
34975 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34976 Sep 22 23:22:14.125 INFO Extent 143 has flush number mismatch, : mend
34977 Sep 22 23:22:14.125 INFO First source client ID for extent 143, mrl: flush_mismatch, : mend
34978 Sep 22 23:22:14.125 INFO extent:143 gens: 0 1 1, mrl: flush_mismatch, : mend
34979 Sep 22 23:22:14.125 INFO extent:143 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34980 Sep 22 23:22:14.125 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34981 Sep 22 23:22:14.125 INFO extent:143 dirty: false false false, mrl: flush_mismatch, : mend
34982 Sep 22 23:22:14.125 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34983 Sep 22 23:22:14.125 INFO find dest for source 1 for extent at index 143, mrl: flush_mismatch, : mend
34984 Sep 22 23:22:14.125 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34985 Sep 22 23:22:14.125 INFO Extent 144 has flush number mismatch, : mend
34986 Sep 22 23:22:14.125 INFO First source client ID for extent 144, mrl: flush_mismatch, : mend
34987 Sep 22 23:22:14.125 INFO extent:144 gens: 0 1 1, mrl: flush_mismatch, : mend
34988 Sep 22 23:22:14.126 INFO extent:144 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34989 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34990 Sep 22 23:22:14.126 INFO extent:144 dirty: false false false, mrl: flush_mismatch, : mend
34991 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34992 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 144, mrl: flush_mismatch, : mend
34993 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34994 Sep 22 23:22:14.126 INFO Extent 145 has flush number mismatch, : mend
34995 Sep 22 23:22:14.126 INFO First source client ID for extent 145, mrl: flush_mismatch, : mend
34996 Sep 22 23:22:14.126 INFO extent:145 gens: 0 1 1, mrl: flush_mismatch, : mend
34997 Sep 22 23:22:14.126 INFO extent:145 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34998 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34999 Sep 22 23:22:14.126 INFO extent:145 dirty: false false false, mrl: flush_mismatch, : mend
35000 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35001 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 145, mrl: flush_mismatch, : mend
35002 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35003 Sep 22 23:22:14.126 INFO Extent 146 has flush number mismatch, : mend
35004 Sep 22 23:22:14.126 INFO First source client ID for extent 146, mrl: flush_mismatch, : mend
35005 Sep 22 23:22:14.126 INFO extent:146 gens: 0 1 1, mrl: flush_mismatch, : mend
35006 Sep 22 23:22:14.126 INFO extent:146 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35007 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35008 Sep 22 23:22:14.126 INFO extent:146 dirty: false false false, mrl: flush_mismatch, : mend
35009 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35010 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 146, mrl: flush_mismatch, : mend
35011 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35012 Sep 22 23:22:14.126 INFO Extent 147 has flush number mismatch, : mend
35013 Sep 22 23:22:14.126 INFO First source client ID for extent 147, mrl: flush_mismatch, : mend
35014 Sep 22 23:22:14.126 INFO extent:147 gens: 0 1 1, mrl: flush_mismatch, : mend
35015 Sep 22 23:22:14.126 INFO extent:147 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35016 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35017 Sep 22 23:22:14.126 INFO extent:147 dirty: false false false, mrl: flush_mismatch, : mend
35018 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35019 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 147, mrl: flush_mismatch, : mend
35020 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35021 Sep 22 23:22:14.126 INFO Extent 148 has flush number mismatch, : mend
35022 Sep 22 23:22:14.126 INFO First source client ID for extent 148, mrl: flush_mismatch, : mend
35023 Sep 22 23:22:14.126 INFO extent:148 gens: 0 1 1, mrl: flush_mismatch, : mend
35024 Sep 22 23:22:14.126 INFO extent:148 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35025 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35026 Sep 22 23:22:14.126 INFO extent:148 dirty: false false false, mrl: flush_mismatch, : mend
35027 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35028 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 148, mrl: flush_mismatch, : mend
35029 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35030 Sep 22 23:22:14.126 INFO Extent 149 has flush number mismatch, : mend
35031 Sep 22 23:22:14.126 INFO First source client ID for extent 149, mrl: flush_mismatch, : mend
35032 Sep 22 23:22:14.126 INFO extent:149 gens: 0 1 1, mrl: flush_mismatch, : mend
35033 Sep 22 23:22:14.126 INFO extent:149 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35034 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35035 Sep 22 23:22:14.126 INFO extent:149 dirty: false false false, mrl: flush_mismatch, : mend
35036 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35037 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 149, mrl: flush_mismatch, : mend
35038 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35039 Sep 22 23:22:14.126 INFO Extent 150 has flush number mismatch, : mend
35040 Sep 22 23:22:14.126 INFO First source client ID for extent 150, mrl: flush_mismatch, : mend
35041 Sep 22 23:22:14.126 INFO extent:150 gens: 0 1 1, mrl: flush_mismatch, : mend
35042 Sep 22 23:22:14.126 INFO extent:150 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35043 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35044 Sep 22 23:22:14.126 INFO extent:150 dirty: false false false, mrl: flush_mismatch, : mend
35045 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35046 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 150, mrl: flush_mismatch, : mend
35047 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35048 Sep 22 23:22:14.126 INFO Extent 151 has flush number mismatch, : mend
35049 Sep 22 23:22:14.126 INFO First source client ID for extent 151, mrl: flush_mismatch, : mend
35050 Sep 22 23:22:14.126 INFO extent:151 gens: 0 1 1, mrl: flush_mismatch, : mend
35051 Sep 22 23:22:14.126 INFO extent:151 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35052 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35053 Sep 22 23:22:14.126 INFO extent:151 dirty: false false false, mrl: flush_mismatch, : mend
35054 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35055 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 151, mrl: flush_mismatch, : mend
35056 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35057 Sep 22 23:22:14.126 INFO Extent 152 has flush number mismatch, : mend
35058 Sep 22 23:22:14.126 INFO First source client ID for extent 152, mrl: flush_mismatch, : mend
35059 Sep 22 23:22:14.126 INFO extent:152 gens: 0 1 1, mrl: flush_mismatch, : mend
35060 Sep 22 23:22:14.126 INFO extent:152 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35061 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35062 Sep 22 23:22:14.126 INFO extent:152 dirty: false false false, mrl: flush_mismatch, : mend
35063 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35064 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 152, mrl: flush_mismatch, : mend
35065 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35066 Sep 22 23:22:14.126 INFO Extent 153 has flush number mismatch, : mend
35067 Sep 22 23:22:14.126 INFO First source client ID for extent 153, mrl: flush_mismatch, : mend
35068 Sep 22 23:22:14.126 INFO extent:153 gens: 0 1 1, mrl: flush_mismatch, : mend
35069 Sep 22 23:22:14.126 INFO extent:153 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35070 Sep 22 23:22:14.126 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35071 Sep 22 23:22:14.126 INFO extent:153 dirty: false false false, mrl: flush_mismatch, : mend
35072 Sep 22 23:22:14.126 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35073 Sep 22 23:22:14.126 INFO find dest for source 1 for extent at index 153, mrl: flush_mismatch, : mend
35074 Sep 22 23:22:14.126 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35075 Sep 22 23:22:14.126 INFO Extent 154 has flush number mismatch, : mend
35076 Sep 22 23:22:14.126 INFO First source client ID for extent 154, mrl: flush_mismatch, : mend
35077 Sep 22 23:22:14.126 INFO extent:154 gens: 0 1 1, mrl: flush_mismatch, : mend
35078 Sep 22 23:22:14.126 INFO extent:154 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35079 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35080 Sep 22 23:22:14.127 INFO extent:154 dirty: false false false, mrl: flush_mismatch, : mend
35081 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35082 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 154, mrl: flush_mismatch, : mend
35083 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35084 Sep 22 23:22:14.127 INFO Extent 155 has flush number mismatch, : mend
35085 Sep 22 23:22:14.127 INFO First source client ID for extent 155, mrl: flush_mismatch, : mend
35086 Sep 22 23:22:14.127 INFO extent:155 gens: 0 1 1, mrl: flush_mismatch, : mend
35087 Sep 22 23:22:14.127 INFO extent:155 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35088 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35089 Sep 22 23:22:14.127 INFO extent:155 dirty: false false false, mrl: flush_mismatch, : mend
35090 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35091 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 155, mrl: flush_mismatch, : mend
35092 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35093 Sep 22 23:22:14.127 INFO Extent 156 has flush number mismatch, : mend
35094 Sep 22 23:22:14.127 INFO First source client ID for extent 156, mrl: flush_mismatch, : mend
35095 Sep 22 23:22:14.127 INFO extent:156 gens: 0 1 1, mrl: flush_mismatch, : mend
35096 Sep 22 23:22:14.127 INFO extent:156 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35097 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35098 Sep 22 23:22:14.127 INFO extent:156 dirty: false false false, mrl: flush_mismatch, : mend
35099 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35100 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 156, mrl: flush_mismatch, : mend
35101 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35102 Sep 22 23:22:14.127 INFO Extent 157 has flush number mismatch, : mend
35103 Sep 22 23:22:14.127 INFO First source client ID for extent 157, mrl: flush_mismatch, : mend
35104 Sep 22 23:22:14.127 INFO extent:157 gens: 0 1 1, mrl: flush_mismatch, : mend
35105 Sep 22 23:22:14.127 INFO extent:157 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35106 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35107 Sep 22 23:22:14.127 INFO extent:157 dirty: false false false, mrl: flush_mismatch, : mend
35108 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35109 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 157, mrl: flush_mismatch, : mend
35110 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35111 Sep 22 23:22:14.127 INFO Extent 158 has flush number mismatch, : mend
35112 Sep 22 23:22:14.127 INFO First source client ID for extent 158, mrl: flush_mismatch, : mend
35113 Sep 22 23:22:14.127 INFO extent:158 gens: 0 1 1, mrl: flush_mismatch, : mend
35114 Sep 22 23:22:14.127 INFO extent:158 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35115 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35116 Sep 22 23:22:14.127 INFO extent:158 dirty: false false false, mrl: flush_mismatch, : mend
35117 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35118 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 158, mrl: flush_mismatch, : mend
35119 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35120 Sep 22 23:22:14.127 INFO Extent 159 has flush number mismatch, : mend
35121 Sep 22 23:22:14.127 INFO First source client ID for extent 159, mrl: flush_mismatch, : mend
35122 Sep 22 23:22:14.127 INFO extent:159 gens: 0 1 1, mrl: flush_mismatch, : mend
35123 Sep 22 23:22:14.127 INFO extent:159 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35124 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35125 Sep 22 23:22:14.127 INFO extent:159 dirty: false false false, mrl: flush_mismatch, : mend
35126 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35127 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 159, mrl: flush_mismatch, : mend
35128 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35129 Sep 22 23:22:14.127 INFO Extent 160 has flush number mismatch, : mend
35130 Sep 22 23:22:14.127 INFO First source client ID for extent 160, mrl: flush_mismatch, : mend
35131 Sep 22 23:22:14.127 INFO extent:160 gens: 0 1 1, mrl: flush_mismatch, : mend
35132 Sep 22 23:22:14.127 INFO extent:160 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35133 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35134 Sep 22 23:22:14.127 INFO extent:160 dirty: false false false, mrl: flush_mismatch, : mend
35135 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35136 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 160, mrl: flush_mismatch, : mend
35137 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35138 Sep 22 23:22:14.127 INFO Extent 161 has flush number mismatch, : mend
35139 Sep 22 23:22:14.127 INFO First source client ID for extent 161, mrl: flush_mismatch, : mend
35140 Sep 22 23:22:14.127 INFO extent:161 gens: 0 1 1, mrl: flush_mismatch, : mend
35141 Sep 22 23:22:14.127 INFO extent:161 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35142 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35143 Sep 22 23:22:14.127 INFO extent:161 dirty: false false false, mrl: flush_mismatch, : mend
35144 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35145 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 161, mrl: flush_mismatch, : mend
35146 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35147 Sep 22 23:22:14.127 INFO Extent 162 has flush number mismatch, : mend
35148 Sep 22 23:22:14.127 INFO First source client ID for extent 162, mrl: flush_mismatch, : mend
35149 Sep 22 23:22:14.127 INFO extent:162 gens: 0 1 1, mrl: flush_mismatch, : mend
35150 Sep 22 23:22:14.127 INFO extent:162 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35151 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35152 Sep 22 23:22:14.127 INFO extent:162 dirty: false false false, mrl: flush_mismatch, : mend
35153 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35154 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 162, mrl: flush_mismatch, : mend
35155 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35156 Sep 22 23:22:14.127 INFO Extent 163 has flush number mismatch, : mend
35157 Sep 22 23:22:14.127 INFO First source client ID for extent 163, mrl: flush_mismatch, : mend
35158 Sep 22 23:22:14.127 INFO extent:163 gens: 0 1 1, mrl: flush_mismatch, : mend
35159 Sep 22 23:22:14.127 INFO extent:163 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35160 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35161 Sep 22 23:22:14.127 INFO extent:163 dirty: false false false, mrl: flush_mismatch, : mend
35162 Sep 22 23:22:14.127 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35163 Sep 22 23:22:14.127 INFO find dest for source 1 for extent at index 163, mrl: flush_mismatch, : mend
35164 Sep 22 23:22:14.127 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35165 Sep 22 23:22:14.127 INFO Extent 164 has flush number mismatch, : mend
35166 Sep 22 23:22:14.127 INFO First source client ID for extent 164, mrl: flush_mismatch, : mend
35167 Sep 22 23:22:14.127 INFO extent:164 gens: 0 1 1, mrl: flush_mismatch, : mend
35168 Sep 22 23:22:14.127 INFO extent:164 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35169 Sep 22 23:22:14.127 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35170 Sep 22 23:22:14.128 INFO extent:164 dirty: false false false, mrl: flush_mismatch, : mend
35171 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35172 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 164, mrl: flush_mismatch, : mend
35173 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35174 Sep 22 23:22:14.128 INFO Extent 165 has flush number mismatch, : mend
35175 Sep 22 23:22:14.128 INFO First source client ID for extent 165, mrl: flush_mismatch, : mend
35176 Sep 22 23:22:14.128 INFO extent:165 gens: 0 1 1, mrl: flush_mismatch, : mend
35177 Sep 22 23:22:14.128 INFO extent:165 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35178 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35179 Sep 22 23:22:14.128 INFO extent:165 dirty: false false false, mrl: flush_mismatch, : mend
35180 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35181 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 165, mrl: flush_mismatch, : mend
35182 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35183 Sep 22 23:22:14.128 INFO Extent 166 has flush number mismatch, : mend
35184 Sep 22 23:22:14.128 INFO First source client ID for extent 166, mrl: flush_mismatch, : mend
35185 Sep 22 23:22:14.128 INFO extent:166 gens: 0 1 1, mrl: flush_mismatch, : mend
35186 Sep 22 23:22:14.128 INFO extent:166 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35187 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35188 Sep 22 23:22:14.128 INFO extent:166 dirty: false false false, mrl: flush_mismatch, : mend
35189 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35190 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 166, mrl: flush_mismatch, : mend
35191 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35192 Sep 22 23:22:14.128 INFO Extent 167 has flush number mismatch, : mend
35193 Sep 22 23:22:14.128 INFO First source client ID for extent 167, mrl: flush_mismatch, : mend
35194 Sep 22 23:22:14.128 INFO extent:167 gens: 0 1 1, mrl: flush_mismatch, : mend
35195 Sep 22 23:22:14.128 INFO extent:167 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35196 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35197 Sep 22 23:22:14.128 INFO extent:167 dirty: false false false, mrl: flush_mismatch, : mend
35198 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35199 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 167, mrl: flush_mismatch, : mend
35200 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35201 Sep 22 23:22:14.128 INFO Extent 168 has flush number mismatch, : mend
35202 Sep 22 23:22:14.128 INFO First source client ID for extent 168, mrl: flush_mismatch, : mend
35203 Sep 22 23:22:14.128 INFO extent:168 gens: 0 1 1, mrl: flush_mismatch, : mend
35204 Sep 22 23:22:14.128 INFO extent:168 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35205 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35206 Sep 22 23:22:14.128 INFO extent:168 dirty: false false false, mrl: flush_mismatch, : mend
35207 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35208 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 168, mrl: flush_mismatch, : mend
35209 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35210 Sep 22 23:22:14.128 INFO Extent 169 has flush number mismatch, : mend
35211 Sep 22 23:22:14.128 INFO First source client ID for extent 169, mrl: flush_mismatch, : mend
35212 Sep 22 23:22:14.128 INFO extent:169 gens: 0 1 1, mrl: flush_mismatch, : mend
35213 Sep 22 23:22:14.128 INFO extent:169 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35214 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35215 Sep 22 23:22:14.128 INFO extent:169 dirty: false false false, mrl: flush_mismatch, : mend
35216 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35217 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 169, mrl: flush_mismatch, : mend
35218 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35219 Sep 22 23:22:14.128 INFO Extent 170 has flush number mismatch, : mend
35220 Sep 22 23:22:14.128 INFO First source client ID for extent 170, mrl: flush_mismatch, : mend
35221 Sep 22 23:22:14.128 INFO extent:170 gens: 0 1 1, mrl: flush_mismatch, : mend
35222 Sep 22 23:22:14.128 INFO extent:170 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35223 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35224 Sep 22 23:22:14.128 INFO extent:170 dirty: false false false, mrl: flush_mismatch, : mend
35225 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35226 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 170, mrl: flush_mismatch, : mend
35227 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35228 Sep 22 23:22:14.128 INFO Extent 171 has flush number mismatch, : mend
35229 Sep 22 23:22:14.128 INFO First source client ID for extent 171, mrl: flush_mismatch, : mend
35230 Sep 22 23:22:14.128 INFO extent:171 gens: 0 1 1, mrl: flush_mismatch, : mend
35231 Sep 22 23:22:14.128 INFO extent:171 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35232 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35233 Sep 22 23:22:14.128 INFO extent:171 dirty: false false false, mrl: flush_mismatch, : mend
35234 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35235 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 171, mrl: flush_mismatch, : mend
35236 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35237 Sep 22 23:22:14.128 INFO Extent 172 has flush number mismatch, : mend
35238 Sep 22 23:22:14.128 INFO First source client ID for extent 172, mrl: flush_mismatch, : mend
35239 Sep 22 23:22:14.128 INFO extent:172 gens: 0 1 1, mrl: flush_mismatch, : mend
35240 Sep 22 23:22:14.128 INFO extent:172 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35241 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35242 Sep 22 23:22:14.128 INFO extent:172 dirty: false false false, mrl: flush_mismatch, : mend
35243 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35244 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 172, mrl: flush_mismatch, : mend
35245 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35246 Sep 22 23:22:14.128 INFO Extent 173 has flush number mismatch, : mend
35247 Sep 22 23:22:14.128 INFO First source client ID for extent 173, mrl: flush_mismatch, : mend
35248 Sep 22 23:22:14.128 INFO extent:173 gens: 0 1 1, mrl: flush_mismatch, : mend
35249 Sep 22 23:22:14.128 INFO extent:173 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35250 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35251 Sep 22 23:22:14.128 INFO extent:173 dirty: false false false, mrl: flush_mismatch, : mend
35252 Sep 22 23:22:14.128 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35253 Sep 22 23:22:14.128 INFO find dest for source 1 for extent at index 173, mrl: flush_mismatch, : mend
35254 Sep 22 23:22:14.128 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35255 Sep 22 23:22:14.128 INFO Extent 174 has flush number mismatch, : mend
35256 Sep 22 23:22:14.128 INFO First source client ID for extent 174, mrl: flush_mismatch, : mend
35257 Sep 22 23:22:14.128 INFO extent:174 gens: 0 1 1, mrl: flush_mismatch, : mend
35258 Sep 22 23:22:14.128 INFO extent:174 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35259 Sep 22 23:22:14.128 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35260 Sep 22 23:22:14.128 INFO extent:174 dirty: false false false, mrl: flush_mismatch, : mend
35261 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35262 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 174, mrl: flush_mismatch, : mend
35263 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35264 Sep 22 23:22:14.129 INFO Extent 175 has flush number mismatch, : mend
35265 Sep 22 23:22:14.129 INFO First source client ID for extent 175, mrl: flush_mismatch, : mend
35266 Sep 22 23:22:14.129 INFO extent:175 gens: 0 1 1, mrl: flush_mismatch, : mend
35267 Sep 22 23:22:14.129 INFO extent:175 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35268 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35269 Sep 22 23:22:14.129 INFO extent:175 dirty: false false false, mrl: flush_mismatch, : mend
35270 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35271 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 175, mrl: flush_mismatch, : mend
35272 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35273 Sep 22 23:22:14.129 INFO Extent 176 has flush number mismatch, : mend
35274 Sep 22 23:22:14.129 INFO First source client ID for extent 176, mrl: flush_mismatch, : mend
35275 Sep 22 23:22:14.129 INFO extent:176 gens: 0 1 1, mrl: flush_mismatch, : mend
35276 Sep 22 23:22:14.129 INFO extent:176 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35277 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35278 Sep 22 23:22:14.129 INFO extent:176 dirty: false false false, mrl: flush_mismatch, : mend
35279 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35280 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 176, mrl: flush_mismatch, : mend
35281 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35282 Sep 22 23:22:14.129 INFO Extent 177 has flush number mismatch, : mend
35283 Sep 22 23:22:14.129 INFO First source client ID for extent 177, mrl: flush_mismatch, : mend
35284 Sep 22 23:22:14.129 INFO extent:177 gens: 0 1 1, mrl: flush_mismatch, : mend
35285 Sep 22 23:22:14.129 INFO extent:177 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35286 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35287 Sep 22 23:22:14.129 INFO extent:177 dirty: false false false, mrl: flush_mismatch, : mend
35288 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35289 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 177, mrl: flush_mismatch, : mend
35290 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35291 Sep 22 23:22:14.129 INFO Extent 178 has flush number mismatch, : mend
35292 Sep 22 23:22:14.129 INFO First source client ID for extent 178, mrl: flush_mismatch, : mend
35293 Sep 22 23:22:14.129 INFO extent:178 gens: 0 1 1, mrl: flush_mismatch, : mend
35294 Sep 22 23:22:14.129 INFO extent:178 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35295 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35296 Sep 22 23:22:14.129 INFO extent:178 dirty: false false false, mrl: flush_mismatch, : mend
35297 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35298 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 178, mrl: flush_mismatch, : mend
35299 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35300 Sep 22 23:22:14.129 INFO Extent 179 has flush number mismatch, : mend
35301 Sep 22 23:22:14.129 INFO First source client ID for extent 179, mrl: flush_mismatch, : mend
35302 Sep 22 23:22:14.129 INFO extent:179 gens: 0 1 1, mrl: flush_mismatch, : mend
35303 Sep 22 23:22:14.129 INFO extent:179 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35304 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35305 Sep 22 23:22:14.129 INFO extent:179 dirty: false false false, mrl: flush_mismatch, : mend
35306 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35307 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 179, mrl: flush_mismatch, : mend
35308 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35309 Sep 22 23:22:14.129 INFO Extent 180 has flush number mismatch, : mend
35310 Sep 22 23:22:14.129 INFO First source client ID for extent 180, mrl: flush_mismatch, : mend
35311 Sep 22 23:22:14.129 INFO extent:180 gens: 0 1 1, mrl: flush_mismatch, : mend
35312 Sep 22 23:22:14.129 INFO extent:180 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35313 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35314 Sep 22 23:22:14.129 INFO extent:180 dirty: false false false, mrl: flush_mismatch, : mend
35315 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35316 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 180, mrl: flush_mismatch, : mend
35317 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35318 Sep 22 23:22:14.129 INFO Extent 181 has flush number mismatch, : mend
35319 Sep 22 23:22:14.129 INFO First source client ID for extent 181, mrl: flush_mismatch, : mend
35320 Sep 22 23:22:14.129 INFO extent:181 gens: 0 1 1, mrl: flush_mismatch, : mend
35321 Sep 22 23:22:14.129 INFO extent:181 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35322 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35323 Sep 22 23:22:14.129 INFO extent:181 dirty: false false false, mrl: flush_mismatch, : mend
35324 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35325 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 181, mrl: flush_mismatch, : mend
35326 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35327 Sep 22 23:22:14.129 INFO Extent 182 has flush number mismatch, : mend
35328 Sep 22 23:22:14.129 INFO First source client ID for extent 182, mrl: flush_mismatch, : mend
35329 Sep 22 23:22:14.129 INFO extent:182 gens: 0 1 1, mrl: flush_mismatch, : mend
35330 Sep 22 23:22:14.129 INFO extent:182 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35331 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35332 Sep 22 23:22:14.129 INFO extent:182 dirty: false false false, mrl: flush_mismatch, : mend
35333 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35334 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 182, mrl: flush_mismatch, : mend
35335 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35336 Sep 22 23:22:14.129 INFO Extent 183 has flush number mismatch, : mend
35337 Sep 22 23:22:14.129 INFO First source client ID for extent 183, mrl: flush_mismatch, : mend
35338 Sep 22 23:22:14.129 INFO extent:183 gens: 0 1 1, mrl: flush_mismatch, : mend
35339 Sep 22 23:22:14.129 INFO extent:183 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35340 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35341 Sep 22 23:22:14.129 INFO extent:183 dirty: false false false, mrl: flush_mismatch, : mend
35342 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35343 Sep 22 23:22:14.129 INFO find dest for source 1 for extent at index 183, mrl: flush_mismatch, : mend
35344 Sep 22 23:22:14.129 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35345 Sep 22 23:22:14.129 INFO Extent 184 has flush number mismatch, : mend
35346 Sep 22 23:22:14.129 INFO First source client ID for extent 184, mrl: flush_mismatch, : mend
35347 Sep 22 23:22:14.129 INFO extent:184 gens: 0 1 1, mrl: flush_mismatch, : mend
35348 Sep 22 23:22:14.129 INFO extent:184 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35349 Sep 22 23:22:14.129 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35350 Sep 22 23:22:14.129 INFO extent:184 dirty: false false false, mrl: flush_mismatch, : mend
35351 Sep 22 23:22:14.129 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35352 Sep 22 23:22:14.130 INFO find dest for source 1 for extent at index 184, mrl: flush_mismatch, : mend
35353 Sep 22 23:22:14.130 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35354 Sep 22 23:22:14.130 INFO Extent 185 has flush number mismatch, : mend
35355 Sep 22 23:22:14.130 INFO First source client ID for extent 185, mrl: flush_mismatch, : mend
35356 Sep 22 23:22:14.130 INFO extent:185 gens: 0 1 1, mrl: flush_mismatch, : mend
35357 Sep 22 23:22:14.130 INFO extent:185 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35358 Sep 22 23:22:14.130 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35359 Sep 22 23:22:14.130 INFO extent:185 dirty: false false false, mrl: flush_mismatch, : mend
35360 Sep 22 23:22:14.130 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35361 Sep 22 23:22:14.130 INFO find dest for source 1 for extent at index 185, mrl: flush_mismatch, : mend
35362 Sep 22 23:22:14.130 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35363 Sep 22 23:22:14.130 INFO Extent 186 has flush number mismatch, : mend
35364 Sep 22 23:22:14.130 INFO First source client ID for extent 186, mrl: flush_mismatch, : mend
35365 Sep 22 23:22:14.130 INFO extent:186 gens: 0 1 1, mrl: flush_mismatch, : mend
35366 Sep 22 23:22:14.130 INFO extent:186 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35367 Sep 22 23:22:14.130 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35368 Sep 22 23:22:14.130 INFO extent:186 dirty: false false false, mrl: flush_mismatch, : mend
35369 Sep 22 23:22:14.130 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35370 Sep 22 23:22:14.130 INFO find dest for source 1 for extent at index 186, mrl: flush_mismatch, : mend
35371 Sep 22 23:22:14.130 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35372 Sep 22 23:22:14.130 INFO Extent 187 has flush number mismatch, : mend
35373 Sep 22 23:22:14.130 INFO First source client ID for extent 187, mrl: flush_mismatch, : mend
35374 Sep 22 23:22:14.130 INFO extent:187 gens: 0 1 1, mrl: flush_mismatch, : mend
35375 Sep 22 23:22:14.130 INFO extent:187 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35376 Sep 22 23:22:14.130 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35377 Sep 22 23:22:14.130 INFO extent:187 dirty: false false false, mrl: flush_mismatch, : mend
35378 Sep 22 23:22:14.130 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35379 Sep 22 23:22:14.130 INFO find dest for source 1 for extent at index 187, mrl: flush_mismatch, : mend
35380 Sep 22 23:22:14.130 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35381 Sep 22 23:22:14.130 INFO Transition from WaitQuorum to Repair
35382 Sep 22 23:22:14.130 INFO Transition from WaitQuorum to Repair
35383 Sep 22 23:22:14.130 INFO Transition from WaitQuorum to Repair
35384 Sep 22 23:22:14.130 INFO Found 188 extents that need repair
35385 Sep 22 23:22:14.130 INFO Full repair list: {31: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 175: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 3: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 53: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 59: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 103: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 92: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 129: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 132: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 138: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 147: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 67: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 2: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 25: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 152: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 153: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 156: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 125: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 37: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 150: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 157: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 89: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 163: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 73: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 168: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 176: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 38: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 21: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 97: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 124: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 47: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 179: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 19: ExtentFix 
{ source: ClientId(1), dest: [ClientId(0)] }, 167: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 182: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 78: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 52: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 172: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 183: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 11: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 184: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 41: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 77: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 107: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 64: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 171: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 165: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 118: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 111: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 74: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 117: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 75: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 36: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 68: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 123: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 55: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 159: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 35: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 162: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 121: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 65: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 106: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 22: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 146: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 148: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 6: ExtentFix { 
source: ClientId(1), dest: [ClientId(0)] }, 62: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 126: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 128: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 79: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 144: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 160: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 133: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 109: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 169: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 28: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 49: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 15: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 43: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 60: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 18: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 110: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 136: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 61: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 119: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 30: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 46: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 127: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 99: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 12: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 34: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 83: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 90: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 66: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 87: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 139: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 26: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 84: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 56: ExtentFix { source: 
ClientId(1), dest: [ClientId(0)] }, 94: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 93: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 44: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 45: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 115: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 32: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 141: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 9: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 186: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 130: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 122: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 105: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 137: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 7: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 63: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 58: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 96: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 40: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 80: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 149: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 161: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 71: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 5: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 23: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 98: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 1: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 166: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 185: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 86: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 135: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 13: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 29: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 27: ExtentFix { source: 
ClientId(1), dest: [ClientId(0)] }, 187: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 10: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 102: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 85: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 100: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 95: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 154: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 50: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 91: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 158: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 177: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 134: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 69: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 155: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 0: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 164: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 24: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 39: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 48: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 16: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 4: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 70: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 76: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 113: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 108: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 42: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 140: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 178: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 181: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 82: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 174: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 17: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 116: ExtentFix { source: 
ClientId(1), dest: [ClientId(0)] }, 142: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 88: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 51: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 33: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 54: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 112: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 143: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 151: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 101: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 173: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 20: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 104: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 81: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 120: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 145: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 131: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 170: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 72: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 180: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 57: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 14: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 8: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 114: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }}, : downstairs
35386 Sep 22 23:22:14.131 INFO Task list: [ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(21), op: ExtentClose { 
repair_id: ReconciliationId(21), extent_id: 103 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 
127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: 
ReconciliationId(53), extent_id: 25 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, 
dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: 
ReconciliationId(85), extent_id: 89 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(96), 
op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, 
dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: 
ReconciliationId(117), extent_id: 124 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }, state: ClientData([New, New, New]) }, ReconcileIO { 
id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 
77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) 
}, ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: 
ReconciliationId(191), extent_id: 118 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { 
repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, 
gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(223), 
op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, 
client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }, state: ClientData([New, New, New]) 
}, ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: 
ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), 
extent_id: 169 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(308), 
op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, 
dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: 
ReconciliationId(329), extent_id: 136 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }, state: ClientData([New, New, New]) }, ReconcileIO { 
id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, 
source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), 
extent_id: 93 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: 
ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { 
repair_id: ReconciliationId(435), extent_id: 130 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(446), op: 
ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, 
gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(467), op: 
ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: 
ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), 
extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, 
New, New]) }, ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { 
repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: 
[ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: 
ReconciliationId(573), extent_id: 134 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }, state: ClientData([New, New, New]) }, ReconcileIO { 
id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, 
source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), 
extent_id: 82 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: 
ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { 
repair_id: ReconciliationId(679), extent_id: 54 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(690), op: 
ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), 
flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), 
extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, 
New, New]) }, ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }, state: ClientData([New, New, New]) }], : downstairs
35387 Sep 22 23:22:14.135 INFO Begin repair with 752 commands
35388 Sep 22 23:22:14.135 INFO Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35389 Sep 22 23:22:14.135 INFO Sent repair work, now wait for resp
35390 Sep 22 23:22:14.135 INFO [0] received reconcile message
35391 Sep 22 23:22:14.135 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35392 Sep 22 23:22:14.135 INFO [0] client ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35393 Sep 22 23:22:14.135 INFO [1] received reconcile message
35394 Sep 22 23:22:14.135 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35395 Sep 22 23:22:14.135 INFO [1] client ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35396 Sep 22 23:22:14.135 INFO [2] received reconcile message
35397 Sep 22 23:22:14.135 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35398 Sep 22 23:22:14.135 INFO [2] client ExtentFlush { repair_id: ReconciliationId(0), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35399 Sep 22 23:22:14.135 DEBG 0 Flush extent 31 with f:2 g:2
35400 Sep 22 23:22:14.135 DEBG Flush just extent 31 with f:2 and g:2
35401 Sep 22 23:22:14.135 DEBG [1] It's time to notify for 0
35402 Sep 22 23:22:14.135 INFO Completion from [1] id:0 status:true
35403 Sep 22 23:22:14.135 INFO [1/752] Repair commands completed
35404 Sep 22 23:22:14.135 INFO Pop front: ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }, state: ClientData([New, New, New]) }
35405 Sep 22 23:22:14.135 INFO Sent repair work, now wait for resp
35406 Sep 22 23:22:14.136 INFO [0] received reconcile message
35407 Sep 22 23:22:14.136 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }, state: ClientData([InProgress, New, New]) }, : downstairs
35408 Sep 22 23:22:14.136 INFO [0] client ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }
35409 Sep 22 23:22:14.136 INFO [1] received reconcile message
35410 Sep 22 23:22:14.136 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35411 Sep 22 23:22:14.136 INFO [1] client ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }
35412 Sep 22 23:22:14.136 INFO [2] received reconcile message
35413 Sep 22 23:22:14.136 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35414 Sep 22 23:22:14.136 INFO [2] client ExtentClose { repair_id: ReconciliationId(1), extent_id: 31 }
35415 Sep 22 23:22:14.136 DEBG 1 Close extent 31
35416 Sep 22 23:22:14.136 DEBG 1 Close extent 31
35417 Sep 22 23:22:14.136 DEBG 1 Close extent 31
35418 Sep 22 23:22:14.137 DEBG [2] It's time to notify for 1
35419 Sep 22 23:22:14.137 INFO Completion from [2] id:1 status:true
35420 Sep 22 23:22:14.137 INFO [2/752] Repair commands completed
35421 Sep 22 23:22:14.137 INFO Pop front: ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35422 Sep 22 23:22:14.137 INFO Sent repair work, now wait for resp
35423 Sep 22 23:22:14.137 INFO [0] received reconcile message
35424 Sep 22 23:22:14.137 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35425 Sep 22 23:22:14.137 INFO [0] client ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35426 Sep 22 23:22:14.137 INFO [0] Sending repair request ReconciliationId(2)
35427 Sep 22 23:22:14.137 INFO [1] received reconcile message
35428 Sep 22 23:22:14.137 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35429 Sep 22 23:22:14.137 INFO [1] client ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35430 Sep 22 23:22:14.137 INFO [1] No action required ReconciliationId(2)
35431 Sep 22 23:22:14.137 INFO [2] received reconcile message
35432 Sep 22 23:22:14.137 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35433 Sep 22 23:22:14.137 INFO [2] client ExtentRepair { repair_id: ReconciliationId(2), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35434 Sep 22 23:22:14.137 INFO [2] No action required ReconciliationId(2)
35435 Sep 22 23:22:14.137 DEBG 2 Repair extent 31 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35436 Sep 22 23:22:14.137 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/01F.copy"
35437 Sep 22 23:22:14.202 INFO accepted connection, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35438 Sep 22 23:22:14.203 TRCE incoming request, uri: /extent/31/files, method: GET, req_id: dbda508a-9d8a-485a-932c-0c4e0c18b908, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35439 Sep 22 23:22:14.203 INFO request completed, latency_us: 270, response_code: 200, uri: /extent/31/files, method: GET, req_id: dbda508a-9d8a-485a-932c-0c4e0c18b908, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35440 Sep 22 23:22:14.203 INFO eid:31 Found repair files: ["01F", "01F.db"]
35441 Sep 22 23:22:14.204 TRCE incoming request, uri: /newextent/31/data, method: GET, req_id: 077849bd-4094-4a21-a79a-6f3d77680cf1, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35442 Sep 22 23:22:14.204 INFO request completed, latency_us: 477, response_code: 200, uri: /newextent/31/data, method: GET, req_id: 077849bd-4094-4a21-a79a-6f3d77680cf1, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35443 Sep 22 23:22:14.209 TRCE incoming request, uri: /newextent/31/db, method: GET, req_id: 0bba777e-9711-4188-88f2-2c5eb30c4f88, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35444 Sep 22 23:22:14.209 INFO request completed, latency_us: 289, response_code: 200, uri: /newextent/31/db, method: GET, req_id: 0bba777e-9711-4188-88f2-2c5eb30c4f88, remote_addr: 127.0.0.1:60599, local_addr: 127.0.0.1:52864, task: repair
35445 Sep 22 23:22:14.211 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/01F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/01F.replace"
35446 Sep 22 23:22:14.211 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35447 Sep 22 23:22:14.212 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/01F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35448 Sep 22 23:22:14.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01F"
35449 Sep 22 23:22:14.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01F.db"
35450 Sep 22 23:22:14.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35451 Sep 22 23:22:14.212 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/01F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/01F.completed"
35452 Sep 22 23:22:14.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35453 Sep 22 23:22:14.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35454 Sep 22 23:22:14.213 DEBG [0] It's time to notify for 2
35455 Sep 22 23:22:14.213 INFO Completion from [0] id:2 status:true
35456 Sep 22 23:22:14.213 INFO [3/752] Repair commands completed
35457 Sep 22 23:22:14.213 INFO Pop front: ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }, state: ClientData([New, New, New]) }
35458 Sep 22 23:22:14.213 INFO Sent repair work, now wait for resp
35459 Sep 22 23:22:14.213 INFO [0] received reconcile message
35460 Sep 22 23:22:14.213 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }, state: ClientData([InProgress, New, New]) }, : downstairs
35461 Sep 22 23:22:14.213 INFO [0] client ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }
35462 Sep 22 23:22:14.213 INFO [1] received reconcile message
35463 Sep 22 23:22:14.213 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35464 Sep 22 23:22:14.213 INFO [1] client ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }
35465 Sep 22 23:22:14.213 INFO [2] received reconcile message
35466 Sep 22 23:22:14.213 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35467 Sep 22 23:22:14.213 INFO [2] client ExtentReopen { repair_id: ReconciliationId(3), extent_id: 31 }
35468 Sep 22 23:22:14.213 DEBG 3 Reopen extent 31
35469 Sep 22 23:22:14.214 DEBG 3 Reopen extent 31
35470 Sep 22 23:22:14.214 DEBG 3 Reopen extent 31
35471 Sep 22 23:22:14.215 DEBG [2] It's time to notify for 3
35472 Sep 22 23:22:14.215 INFO Completion from [2] id:3 status:true
35473 Sep 22 23:22:14.215 INFO [4/752] Repair commands completed
35474 Sep 22 23:22:14.215 INFO Pop front: ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35475 Sep 22 23:22:14.215 INFO Sent repair work, now wait for resp
35476 Sep 22 23:22:14.215 INFO [0] received reconcile message
35477 Sep 22 23:22:14.215 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35478 Sep 22 23:22:14.215 INFO [0] client ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35479 Sep 22 23:22:14.215 INFO [1] received reconcile message
35480 Sep 22 23:22:14.215 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35481 Sep 22 23:22:14.215 INFO [1] client ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35482 Sep 22 23:22:14.215 INFO [2] received reconcile message
35483 Sep 22 23:22:14.215 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35484 Sep 22 23:22:14.215 INFO [2] client ExtentFlush { repair_id: ReconciliationId(4), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35485 Sep 22 23:22:14.215 DEBG 4 Flush extent 175 with f:2 g:2
35486 Sep 22 23:22:14.215 DEBG Flush just extent 175 with f:2 and g:2
35487 Sep 22 23:22:14.216 DEBG [1] It's time to notify for 4
35488 Sep 22 23:22:14.216 INFO Completion from [1] id:4 status:true
35489 Sep 22 23:22:14.216 INFO [5/752] Repair commands completed
35490 Sep 22 23:22:14.216 INFO Pop front: ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }, state: ClientData([New, New, New]) }
35491 Sep 22 23:22:14.216 INFO Sent repair work, now wait for resp
35492 Sep 22 23:22:14.216 INFO [0] received reconcile message
35493 Sep 22 23:22:14.216 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }, state: ClientData([InProgress, New, New]) }, : downstairs
35494 Sep 22 23:22:14.216 INFO [0] client ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }
35495 Sep 22 23:22:14.216 INFO [1] received reconcile message
35496 Sep 22 23:22:14.216 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35497 Sep 22 23:22:14.216 INFO [1] client ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }
35498 Sep 22 23:22:14.216 INFO [2] received reconcile message
35499 Sep 22 23:22:14.216 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35500 Sep 22 23:22:14.216 INFO [2] client ExtentClose { repair_id: ReconciliationId(5), extent_id: 175 }
35501 Sep 22 23:22:14.216 DEBG 5 Close extent 175
35502 Sep 22 23:22:14.216 DEBG 5 Close extent 175
35503 Sep 22 23:22:14.217 DEBG 5 Close extent 175
35504 Sep 22 23:22:14.217 DEBG [2] It's time to notify for 5
35505 Sep 22 23:22:14.217 INFO Completion from [2] id:5 status:true
35506 Sep 22 23:22:14.217 INFO [6/752] Repair commands completed
35507 Sep 22 23:22:14.217 INFO Pop front: ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35508 Sep 22 23:22:14.217 INFO Sent repair work, now wait for resp
35509 Sep 22 23:22:14.217 INFO [0] received reconcile message
35510 Sep 22 23:22:14.217 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35511 Sep 22 23:22:14.217 INFO [0] client ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35512 Sep 22 23:22:14.217 INFO [0] Sending repair request ReconciliationId(6)
35513 Sep 22 23:22:14.217 INFO [1] received reconcile message
35514 Sep 22 23:22:14.217 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35515 Sep 22 23:22:14.217 INFO [1] client ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35516 Sep 22 23:22:14.217 INFO [1] No action required ReconciliationId(6)
35517 Sep 22 23:22:14.217 INFO [2] received reconcile message
35518 Sep 22 23:22:14.217 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35519 Sep 22 23:22:14.217 INFO [2] client ExtentRepair { repair_id: ReconciliationId(6), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35520 Sep 22 23:22:14.217 INFO [2] No action required ReconciliationId(6)
35521 Sep 22 23:22:14.217 DEBG 6 Repair extent 175 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35522 Sep 22 23:22:14.217 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0AF.copy"
35523 Sep 22 23:22:14.282 INFO accepted connection, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35524 Sep 22 23:22:14.282 TRCE incoming request, uri: /extent/175/files, method: GET, req_id: 6c12a7e7-5aa4-4668-8b41-ffe364262304, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35525 Sep 22 23:22:14.283 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/175/files, method: GET, req_id: 6c12a7e7-5aa4-4668-8b41-ffe364262304, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35526 Sep 22 23:22:14.283 INFO eid:175 Found repair files: ["0AF", "0AF.db"]
35527 Sep 22 23:22:14.283 TRCE incoming request, uri: /newextent/175/data, method: GET, req_id: 2686f34a-43e2-4d00-9de0-1e633fa0c75e, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35528 Sep 22 23:22:14.283 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/175/data, method: GET, req_id: 2686f34a-43e2-4d00-9de0-1e633fa0c75e, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35529 Sep 22 23:22:14.288 TRCE incoming request, uri: /newextent/175/db, method: GET, req_id: aa7671e6-dc37-4383-940b-858878f6e46d, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35530 Sep 22 23:22:14.289 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/175/db, method: GET, req_id: aa7671e6-dc37-4383-940b-858878f6e46d, remote_addr: 127.0.0.1:42863, local_addr: 127.0.0.1:52864, task: repair
35531 Sep 22 23:22:14.290 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0AF.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0AF.replace"
35532 Sep 22 23:22:14.290 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35533 Sep 22 23:22:14.291 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0AF.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35534 Sep 22 23:22:14.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AF"
35535 Sep 22 23:22:14.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AF.db"
35536 Sep 22 23:22:14.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35537 Sep 22 23:22:14.291 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0AF.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0AF.completed"
35538 Sep 22 23:22:14.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35539 Sep 22 23:22:14.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35540 Sep 22 23:22:14.292 DEBG [0] It's time to notify for 6
35541 Sep 22 23:22:14.292 INFO Completion from [0] id:6 status:true
35542 Sep 22 23:22:14.292 INFO [7/752] Repair commands completed
35543 Sep 22 23:22:14.292 INFO Pop front: ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }, state: ClientData([New, New, New]) }
35544 Sep 22 23:22:14.292 INFO Sent repair work, now wait for resp
35545 Sep 22 23:22:14.292 INFO [0] received reconcile message
35546 Sep 22 23:22:14.292 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }, state: ClientData([InProgress, New, New]) }, : downstairs
35547 Sep 22 23:22:14.292 INFO [0] client ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }
35548 Sep 22 23:22:14.292 INFO [1] received reconcile message
35549 Sep 22 23:22:14.292 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35550 Sep 22 23:22:14.292 INFO [1] client ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }
35551 Sep 22 23:22:14.292 INFO [2] received reconcile message
35552 Sep 22 23:22:14.292 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35553 Sep 22 23:22:14.292 INFO [2] client ExtentReopen { repair_id: ReconciliationId(7), extent_id: 175 }
35554 Sep 22 23:22:14.292 DEBG 7 Reopen extent 175
35555 Sep 22 23:22:14.293 DEBG 7 Reopen extent 175
35556 Sep 22 23:22:14.293 DEBG 7 Reopen extent 175
35557 Sep 22 23:22:14.294 DEBG [2] It's time to notify for 7
35558 Sep 22 23:22:14.294 INFO Completion from [2] id:7 status:true
35559 Sep 22 23:22:14.294 INFO [8/752] Repair commands completed
35560 Sep 22 23:22:14.294 INFO Pop front: ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35561 Sep 22 23:22:14.294 INFO Sent repair work, now wait for resp
35562 Sep 22 23:22:14.294 INFO [0] received reconcile message
35563 Sep 22 23:22:14.294 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35564 Sep 22 23:22:14.294 INFO [0] client ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35565 Sep 22 23:22:14.294 INFO [1] received reconcile message
35566 Sep 22 23:22:14.294 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35567 Sep 22 23:22:14.294 INFO [1] client ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35568 Sep 22 23:22:14.294 INFO [2] received reconcile message
35569 Sep 22 23:22:14.294 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35570 Sep 22 23:22:14.294 INFO [2] client ExtentFlush { repair_id: ReconciliationId(8), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35571 Sep 22 23:22:14.294 DEBG 8 Flush extent 3 with f:2 g:2
35572 Sep 22 23:22:14.294 DEBG Flush just extent 3 with f:2 and g:2
35573 Sep 22 23:22:14.294 DEBG [1] It's time to notify for 8
35574 Sep 22 23:22:14.294 INFO Completion from [1] id:8 status:true
35575 Sep 22 23:22:14.294 INFO [9/752] Repair commands completed
35576 Sep 22 23:22:14.294 INFO Pop front: ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }, state: ClientData([New, New, New]) }
35577 Sep 22 23:22:14.294 INFO Sent repair work, now wait for resp
35578 Sep 22 23:22:14.294 INFO [0] received reconcile message
35579 Sep 22 23:22:14.295 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }, state: ClientData([InProgress, New, New]) }, : downstairs
35580 Sep 22 23:22:14.295 INFO [0] client ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }
35581 Sep 22 23:22:14.295 INFO [1] received reconcile message
35582 Sep 22 23:22:14.295 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35583 Sep 22 23:22:14.295 INFO [1] client ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }
35584 Sep 22 23:22:14.295 INFO [2] received reconcile message
35585 Sep 22 23:22:14.295 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35586 Sep 22 23:22:14.295 INFO [2] client ExtentClose { repair_id: ReconciliationId(9), extent_id: 3 }
35587 Sep 22 23:22:14.295 DEBG 9 Close extent 3
35588 Sep 22 23:22:14.295 DEBG 9 Close extent 3
35589 Sep 22 23:22:14.295 DEBG 9 Close extent 3
35590 Sep 22 23:22:14.296 DEBG [2] It's time to notify for 9
35591 Sep 22 23:22:14.296 INFO Completion from [2] id:9 status:true
35592 Sep 22 23:22:14.296 INFO [10/752] Repair commands completed
35593 Sep 22 23:22:14.296 INFO Pop front: ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35594 Sep 22 23:22:14.296 INFO Sent repair work, now wait for resp
35595 Sep 22 23:22:14.296 INFO [0] received reconcile message
35596 Sep 22 23:22:14.296 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35597 Sep 22 23:22:14.296 INFO [0] client ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35598 Sep 22 23:22:14.296 INFO [0] Sending repair request ReconciliationId(10)
35599 Sep 22 23:22:14.296 INFO [1] received reconcile message
35600 Sep 22 23:22:14.296 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35601 Sep 22 23:22:14.296 INFO [1] client ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35602 Sep 22 23:22:14.296 INFO [1] No action required ReconciliationId(10)
35603 Sep 22 23:22:14.296 INFO [2] received reconcile message
35604 Sep 22 23:22:14.296 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35605 Sep 22 23:22:14.296 INFO [2] client ExtentRepair { repair_id: ReconciliationId(10), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35606 Sep 22 23:22:14.296 INFO [2] No action required ReconciliationId(10)
35607 Sep 22 23:22:14.296 DEBG 10 Repair extent 3 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35608 Sep 22 23:22:14.296 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/003.copy"
35609 Sep 22 23:22:14.360 INFO accepted connection, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35610 Sep 22 23:22:14.360 TRCE incoming request, uri: /extent/3/files, method: GET, req_id: 13b98295-8755-4d86-b0eb-c70f449bc669, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35611 Sep 22 23:22:14.361 INFO request completed, latency_us: 188, response_code: 200, uri: /extent/3/files, method: GET, req_id: 13b98295-8755-4d86-b0eb-c70f449bc669, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35612 Sep 22 23:22:14.361 INFO eid:3 Found repair files: ["003", "003.db"]
35613 Sep 22 23:22:14.361 TRCE incoming request, uri: /newextent/3/data, method: GET, req_id: 3ba67e57-64c7-47c7-9d4a-347b27c20bca, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35614 Sep 22 23:22:14.361 INFO request completed, latency_us: 311, response_code: 200, uri: /newextent/3/data, method: GET, req_id: 3ba67e57-64c7-47c7-9d4a-347b27c20bca, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35615 Sep 22 23:22:14.366 TRCE incoming request, uri: /newextent/3/db, method: GET, req_id: a9c897a9-cfbd-4957-9df5-5a026f8e251d, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35616 Sep 22 23:22:14.367 INFO request completed, latency_us: 285, response_code: 200, uri: /newextent/3/db, method: GET, req_id: a9c897a9-cfbd-4957-9df5-5a026f8e251d, remote_addr: 127.0.0.1:53368, local_addr: 127.0.0.1:52864, task: repair
35617 Sep 22 23:22:14.368 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/003.copy" to "/tmp/downstairs-zrMnlo6G/00/000/003.replace"
35618 Sep 22 23:22:14.368 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35619 Sep 22 23:22:14.369 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/003.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35620 Sep 22 23:22:14.369 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/003"
35621 Sep 22 23:22:14.369 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/003.db"
35622 Sep 22 23:22:14.369 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35623 Sep 22 23:22:14.369 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/003.replace" to "/tmp/downstairs-zrMnlo6G/00/000/003.completed"
35624 Sep 22 23:22:14.369 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35625 Sep 22 23:22:14.369 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35626 Sep 22 23:22:14.369 DEBG [0] It's time to notify for 10
35627 Sep 22 23:22:14.369 INFO Completion from [0] id:10 status:true
35628 Sep 22 23:22:14.370 INFO [11/752] Repair commands completed
35629 Sep 22 23:22:14.370 INFO Pop front: ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }, state: ClientData([New, New, New]) }
35630 Sep 22 23:22:14.370 INFO Sent repair work, now wait for resp
35631 Sep 22 23:22:14.370 INFO [0] received reconcile message
35632 Sep 22 23:22:14.370 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }, state: ClientData([InProgress, New, New]) }, : downstairs
35633 Sep 22 23:22:14.370 INFO [0] client ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }
35634 Sep 22 23:22:14.370 INFO [1] received reconcile message
35635 Sep 22 23:22:14.370 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35636 Sep 22 23:22:14.370 INFO [1] client ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }
35637 Sep 22 23:22:14.370 INFO [2] received reconcile message
35638 Sep 22 23:22:14.370 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35639 Sep 22 23:22:14.370 INFO [2] client ExtentReopen { repair_id: ReconciliationId(11), extent_id: 3 }
35640 Sep 22 23:22:14.370 DEBG 11 Reopen extent 3
35641 Sep 22 23:22:14.371 DEBG 11 Reopen extent 3
35642 Sep 22 23:22:14.371 DEBG 11 Reopen extent 3
35643 Sep 22 23:22:14.372 DEBG [2] It's time to notify for 11
35644 Sep 22 23:22:14.372 INFO Completion from [2] id:11 status:true
35645 Sep 22 23:22:14.372 INFO [12/752] Repair commands completed
35646 Sep 22 23:22:14.372 INFO Pop front: ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35647 Sep 22 23:22:14.372 INFO Sent repair work, now wait for resp
35648 Sep 22 23:22:14.372 INFO [0] received reconcile message
35649 Sep 22 23:22:14.372 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35650 Sep 22 23:22:14.372 INFO [0] client ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35651 Sep 22 23:22:14.372 INFO [1] received reconcile message
35652 Sep 22 23:22:14.372 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35653 Sep 22 23:22:14.372 INFO [1] client ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35654 Sep 22 23:22:14.372 INFO [2] received reconcile message
35655 Sep 22 23:22:14.372 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35656 Sep 22 23:22:14.372 INFO [2] client ExtentFlush { repair_id: ReconciliationId(12), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35657 Sep 22 23:22:14.372 DEBG 12 Flush extent 53 with f:2 g:2
35658 Sep 22 23:22:14.372 DEBG Flush just extent 53 with f:2 and g:2
35659 Sep 22 23:22:14.372 DEBG [1] It's time to notify for 12
35660 Sep 22 23:22:14.372 INFO Completion from [1] id:12 status:true
35661 Sep 22 23:22:14.372 INFO [13/752] Repair commands completed
35662 Sep 22 23:22:14.372 INFO Pop front: ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }, state: ClientData([New, New, New]) }
35663 Sep 22 23:22:14.372 INFO Sent repair work, now wait for resp
35664 Sep 22 23:22:14.372 INFO [0] received reconcile message
35665 Sep 22 23:22:14.372 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }, state: ClientData([InProgress, New, New]) }, : downstairs
35666 Sep 22 23:22:14.372 INFO [0] client ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }
35667 Sep 22 23:22:14.372 INFO [1] received reconcile message
35668 Sep 22 23:22:14.372 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35669 Sep 22 23:22:14.372 INFO [1] client ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }
35670 Sep 22 23:22:14.372 INFO [2] received reconcile message
35671 Sep 22 23:22:14.372 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35672 Sep 22 23:22:14.372 INFO [2] client ExtentClose { repair_id: ReconciliationId(13), extent_id: 53 }
35673 Sep 22 23:22:14.373 DEBG 13 Close extent 53
35674 Sep 22 23:22:14.373 DEBG 13 Close extent 53
35675 Sep 22 23:22:14.373 DEBG 13 Close extent 53
35676 Sep 22 23:22:14.373 DEBG [2] It's time to notify for 13
35677 Sep 22 23:22:14.374 INFO Completion from [2] id:13 status:true
35678 Sep 22 23:22:14.374 INFO [14/752] Repair commands completed
35679 Sep 22 23:22:14.374 INFO Pop front: ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35680 Sep 22 23:22:14.374 INFO Sent repair work, now wait for resp
35681 Sep 22 23:22:14.374 INFO [0] received reconcile message
35682 Sep 22 23:22:14.374 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35683 Sep 22 23:22:14.374 INFO [0] client ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35684 Sep 22 23:22:14.374 INFO [0] Sending repair request ReconciliationId(14)
35685 Sep 22 23:22:14.374 INFO [1] received reconcile message
35686 Sep 22 23:22:14.374 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35687 Sep 22 23:22:14.374 INFO [1] client ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35688 Sep 22 23:22:14.374 INFO [1] No action required ReconciliationId(14)
35689 Sep 22 23:22:14.374 INFO [2] received reconcile message
35690 Sep 22 23:22:14.374 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35691 Sep 22 23:22:14.374 INFO [2] client ExtentRepair { repair_id: ReconciliationId(14), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35692 Sep 22 23:22:14.374 INFO [2] No action required ReconciliationId(14)
35693 Sep 22 23:22:14.374 DEBG 14 Repair extent 53 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35694 Sep 22 23:22:14.374 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/035.copy"
35695 Sep 22 23:22:14.438 INFO accepted connection, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35696 Sep 22 23:22:14.438 TRCE incoming request, uri: /extent/53/files, method: GET, req_id: e2de8886-f68b-48be-8dc0-c8b440a97525, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35697 Sep 22 23:22:14.438 INFO request completed, latency_us: 207, response_code: 200, uri: /extent/53/files, method: GET, req_id: e2de8886-f68b-48be-8dc0-c8b440a97525, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35698 Sep 22 23:22:14.438 INFO eid:53 Found repair files: ["035", "035.db"]
35699 Sep 22 23:22:14.439 TRCE incoming request, uri: /newextent/53/data, method: GET, req_id: 8ff8d5b0-6cd8-45ff-958a-196b08d51ab7, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35700 Sep 22 23:22:14.439 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/53/data, method: GET, req_id: 8ff8d5b0-6cd8-45ff-958a-196b08d51ab7, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35701 Sep 22 23:22:14.444 TRCE incoming request, uri: /newextent/53/db, method: GET, req_id: 631aa683-1c93-4e4e-a8ec-7f0fa39e7b1c, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35702 Sep 22 23:22:14.444 INFO request completed, latency_us: 286, response_code: 200, uri: /newextent/53/db, method: GET, req_id: 631aa683-1c93-4e4e-a8ec-7f0fa39e7b1c, remote_addr: 127.0.0.1:60377, local_addr: 127.0.0.1:52864, task: repair
35703 Sep 22 23:22:14.445 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/035.copy" to "/tmp/downstairs-zrMnlo6G/00/000/035.replace"
35704 Sep 22 23:22:14.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35705 Sep 22 23:22:14.446 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/035.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35706 Sep 22 23:22:14.446 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/035"
35707 Sep 22 23:22:14.447 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/035.db"
35708 Sep 22 23:22:14.447 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35709 Sep 22 23:22:14.447 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/035.replace" to "/tmp/downstairs-zrMnlo6G/00/000/035.completed"
35710 Sep 22 23:22:14.447 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35711 Sep 22 23:22:14.447 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35712 Sep 22 23:22:14.447 DEBG [0] It's time to notify for 14
35713 Sep 22 23:22:14.447 INFO Completion from [0] id:14 status:true
35714 Sep 22 23:22:14.447 INFO [15/752] Repair commands completed
35715 Sep 22 23:22:14.447 INFO Pop front: ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }, state: ClientData([New, New, New]) }
35716 Sep 22 23:22:14.447 INFO Sent repair work, now wait for resp
35717 Sep 22 23:22:14.447 INFO [0] received reconcile message
35718 Sep 22 23:22:14.447 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }, state: ClientData([InProgress, New, New]) }, : downstairs
35719 Sep 22 23:22:14.447 INFO [0] client ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }
35720 Sep 22 23:22:14.447 INFO [1] received reconcile message
35721 Sep 22 23:22:14.447 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35722 Sep 22 23:22:14.447 INFO [1] client ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }
35723 Sep 22 23:22:14.447 INFO [2] received reconcile message
35724 Sep 22 23:22:14.447 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35725 Sep 22 23:22:14.447 INFO [2] client ExtentReopen { repair_id: ReconciliationId(15), extent_id: 53 }
35726 Sep 22 23:22:14.447 DEBG 15 Reopen extent 53
35727 Sep 22 23:22:14.448 DEBG 15 Reopen extent 53
35728 Sep 22 23:22:14.448 DEBG 15 Reopen extent 53
35729 Sep 22 23:22:14.449 DEBG [2] It's time to notify for 15
35730 Sep 22 23:22:14.449 INFO Completion from [2] id:15 status:true
35731 Sep 22 23:22:14.449 INFO [16/752] Repair commands completed
35732 Sep 22 23:22:14.449 INFO Pop front: ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35733 Sep 22 23:22:14.449 INFO Sent repair work, now wait for resp
35734 Sep 22 23:22:14.449 INFO [0] received reconcile message
35735 Sep 22 23:22:14.449 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35736 Sep 22 23:22:14.449 INFO [0] client ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35737 Sep 22 23:22:14.449 INFO [1] received reconcile message
35738 Sep 22 23:22:14.449 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35739 Sep 22 23:22:14.449 INFO [1] client ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35740 Sep 22 23:22:14.449 INFO [2] received reconcile message
35741 Sep 22 23:22:14.449 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35742 Sep 22 23:22:14.449 INFO [2] client ExtentFlush { repair_id: ReconciliationId(16), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35743 Sep 22 23:22:14.449 DEBG 16 Flush extent 59 with f:2 g:2
35744 Sep 22 23:22:14.449 DEBG Flush just extent 59 with f:2 and g:2
35745 Sep 22 23:22:14.450 DEBG [1] It's time to notify for 16
35746 Sep 22 23:22:14.450 INFO Completion from [1] id:16 status:true
35747 Sep 22 23:22:14.450 INFO [17/752] Repair commands completed
35748 Sep 22 23:22:14.450 INFO Pop front: ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }, state: ClientData([New, New, New]) }
35749 Sep 22 23:22:14.450 INFO Sent repair work, now wait for resp
35750 Sep 22 23:22:14.450 INFO [0] received reconcile message
35751 Sep 22 23:22:14.450 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }, state: ClientData([InProgress, New, New]) }, : downstairs
35752 Sep 22 23:22:14.450 INFO [0] client ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }
35753 Sep 22 23:22:14.450 INFO [1] received reconcile message
35754 Sep 22 23:22:14.450 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35755 Sep 22 23:22:14.450 INFO [1] client ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }
35756 Sep 22 23:22:14.450 INFO [2] received reconcile message
35757 Sep 22 23:22:14.450 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35758 Sep 22 23:22:14.450 INFO [2] client ExtentClose { repair_id: ReconciliationId(17), extent_id: 59 }
35759 Sep 22 23:22:14.450 DEBG 17 Close extent 59
35760 Sep 22 23:22:14.450 DEBG 17 Close extent 59
35761 Sep 22 23:22:14.451 DEBG 17 Close extent 59
35762 Sep 22 23:22:14.451 DEBG [2] It's time to notify for 17
35763 Sep 22 23:22:14.451 INFO Completion from [2] id:17 status:true
35764 Sep 22 23:22:14.451 INFO [18/752] Repair commands completed
35765 Sep 22 23:22:14.451 INFO Pop front: ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35766 Sep 22 23:22:14.451 INFO Sent repair work, now wait for resp
35767 Sep 22 23:22:14.451 INFO [0] received reconcile message
35768 Sep 22 23:22:14.451 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35769 Sep 22 23:22:14.451 INFO [0] client ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35770 Sep 22 23:22:14.451 INFO [0] Sending repair request ReconciliationId(18)
35771 Sep 22 23:22:14.451 INFO [1] received reconcile message
35772 Sep 22 23:22:14.451 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35773 Sep 22 23:22:14.451 INFO [1] client ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35774 Sep 22 23:22:14.451 INFO [1] No action required ReconciliationId(18)
35775 Sep 22 23:22:14.451 INFO [2] received reconcile message
35776 Sep 22 23:22:14.451 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35777 Sep 22 23:22:14.451 INFO [2] client ExtentRepair { repair_id: ReconciliationId(18), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35778 Sep 22 23:22:14.451 INFO [2] No action required ReconciliationId(18)
35779 Sep 22 23:22:14.451 DEBG 18 Repair extent 59 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35780 Sep 22 23:22:14.451 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/03B.copy"
35781 Sep 22 23:22:14.516 INFO accepted connection, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35782 Sep 22 23:22:14.516 TRCE incoming request, uri: /extent/59/files, method: GET, req_id: cbbee225-db88-4e16-932d-3d40f674ff02, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35783 Sep 22 23:22:14.516 INFO request completed, latency_us: 187, response_code: 200, uri: /extent/59/files, method: GET, req_id: cbbee225-db88-4e16-932d-3d40f674ff02, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35784 Sep 22 23:22:14.516 INFO eid:59 Found repair files: ["03B", "03B.db"]
35785 Sep 22 23:22:14.517 TRCE incoming request, uri: /newextent/59/data, method: GET, req_id: b24b7e21-5a24-456b-aee9-769de8ab9a2b, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35786 Sep 22 23:22:14.517 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/59/data, method: GET, req_id: b24b7e21-5a24-456b-aee9-769de8ab9a2b, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35787 Sep 22 23:22:14.522 TRCE incoming request, uri: /newextent/59/db, method: GET, req_id: 61251456-69c1-471c-b27a-65d590ed0023, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35788 Sep 22 23:22:14.522 INFO request completed, latency_us: 289, response_code: 200, uri: /newextent/59/db, method: GET, req_id: 61251456-69c1-471c-b27a-65d590ed0023, remote_addr: 127.0.0.1:43611, local_addr: 127.0.0.1:52864, task: repair
35789 Sep 22 23:22:14.523 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/03B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/03B.replace"
35790 Sep 22 23:22:14.523 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35791 Sep 22 23:22:14.524 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/03B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35792 Sep 22 23:22:14.525 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03B"
35793 Sep 22 23:22:14.525 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03B.db"
35794 Sep 22 23:22:14.525 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35795 Sep 22 23:22:14.525 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/03B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/03B.completed"
35796 Sep 22 23:22:14.525 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35797 Sep 22 23:22:14.525 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35798 Sep 22 23:22:14.525 DEBG [0] It's time to notify for 18
35799 Sep 22 23:22:14.525 INFO Completion from [0] id:18 status:true
35800 Sep 22 23:22:14.525 INFO [19/752] Repair commands completed
35801 Sep 22 23:22:14.525 INFO Pop front: ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }, state: ClientData([New, New, New]) }
35802 Sep 22 23:22:14.525 INFO Sent repair work, now wait for resp
35803 Sep 22 23:22:14.525 INFO [0] received reconcile message
35804 Sep 22 23:22:14.525 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }, state: ClientData([InProgress, New, New]) }, : downstairs
35805 Sep 22 23:22:14.525 INFO [0] client ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }
35806 Sep 22 23:22:14.525 INFO [1] received reconcile message
35807 Sep 22 23:22:14.525 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35808 Sep 22 23:22:14.525 INFO [1] client ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }
35809 Sep 22 23:22:14.525 INFO [2] received reconcile message
35810 Sep 22 23:22:14.525 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35811 Sep 22 23:22:14.525 INFO [2] client ExtentReopen { repair_id: ReconciliationId(19), extent_id: 59 }
35812 Sep 22 23:22:14.525 DEBG 19 Reopen extent 59
35813 Sep 22 23:22:14.526 DEBG 19 Reopen extent 59
35814 Sep 22 23:22:14.526 DEBG 19 Reopen extent 59
35815 Sep 22 23:22:14.527 DEBG [2] It's time to notify for 19
35816 Sep 22 23:22:14.527 INFO Completion from [2] id:19 status:true
35817 Sep 22 23:22:14.527 INFO [20/752] Repair commands completed
35818 Sep 22 23:22:14.527 INFO Pop front: ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35819 Sep 22 23:22:14.527 INFO Sent repair work, now wait for resp
35820 Sep 22 23:22:14.527 INFO [0] received reconcile message
35821 Sep 22 23:22:14.527 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35822 Sep 22 23:22:14.527 INFO [0] client ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35823 Sep 22 23:22:14.527 INFO [1] received reconcile message
35824 Sep 22 23:22:14.527 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35825 Sep 22 23:22:14.527 INFO [1] client ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35826 Sep 22 23:22:14.527 INFO [2] received reconcile message
35827 Sep 22 23:22:14.527 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35828 Sep 22 23:22:14.527 INFO [2] client ExtentFlush { repair_id: ReconciliationId(20), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35829 Sep 22 23:22:14.527 DEBG 20 Flush extent 103 with f:2 g:2
35830 Sep 22 23:22:14.527 DEBG Flush just extent 103 with f:2 and g:2
35831 Sep 22 23:22:14.528 DEBG [1] It's time to notify for 20
35832 Sep 22 23:22:14.528 INFO Completion from [1] id:20 status:true
35833 Sep 22 23:22:14.528 INFO [21/752] Repair commands completed
35834 Sep 22 23:22:14.528 INFO Pop front: ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }, state: ClientData([New, New, New]) }
35835 Sep 22 23:22:14.528 INFO Sent repair work, now wait for resp
35836 Sep 22 23:22:14.528 INFO [0] received reconcile message
35837 Sep 22 23:22:14.528 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }, state: ClientData([InProgress, New, New]) }, : downstairs
35838 Sep 22 23:22:14.528 INFO [0] client ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }
35839 Sep 22 23:22:14.528 INFO [1] received reconcile message
35840 Sep 22 23:22:14.528 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35841 Sep 22 23:22:14.528 INFO [1] client ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }
35842 Sep 22 23:22:14.528 INFO [2] received reconcile message
35843 Sep 22 23:22:14.528 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35844 Sep 22 23:22:14.528 INFO [2] client ExtentClose { repair_id: ReconciliationId(21), extent_id: 103 }
35845 Sep 22 23:22:14.528 DEBG 21 Close extent 103
35846 Sep 22 23:22:14.528 DEBG 21 Close extent 103
35847 Sep 22 23:22:14.529 DEBG 21 Close extent 103
35848 Sep 22 23:22:14.529 DEBG [2] It's time to notify for 21
35849 Sep 22 23:22:14.529 INFO Completion from [2] id:21 status:true
35850 Sep 22 23:22:14.529 INFO [22/752] Repair commands completed
35851 Sep 22 23:22:14.529 INFO Pop front: ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35852 Sep 22 23:22:14.529 INFO Sent repair work, now wait for resp
35853 Sep 22 23:22:14.529 INFO [0] received reconcile message
35854 Sep 22 23:22:14.529 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35855 Sep 22 23:22:14.529 INFO [0] client ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35856 Sep 22 23:22:14.529 INFO [0] Sending repair request ReconciliationId(22)
35857 Sep 22 23:22:14.529 INFO [1] received reconcile message
35858 Sep 22 23:22:14.529 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35859 Sep 22 23:22:14.529 INFO [1] client ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35860 Sep 22 23:22:14.529 INFO [1] No action required ReconciliationId(22)
35861 Sep 22 23:22:14.529 INFO [2] received reconcile message
35862 Sep 22 23:22:14.529 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35863 Sep 22 23:22:14.529 INFO [2] client ExtentRepair { repair_id: ReconciliationId(22), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35864 Sep 22 23:22:14.529 INFO [2] No action required ReconciliationId(22)
35865 Sep 22 23:22:14.529 DEBG 22 Repair extent 103 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35866 Sep 22 23:22:14.529 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/067.copy"
35867 Sep 22 23:22:14.593 INFO accepted connection, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35868 Sep 22 23:22:14.594 TRCE incoming request, uri: /extent/103/files, method: GET, req_id: 9f723176-0cd4-4975-9695-22e5b3e4c325, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35869 Sep 22 23:22:14.594 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/103/files, method: GET, req_id: 9f723176-0cd4-4975-9695-22e5b3e4c325, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35870 Sep 22 23:22:14.594 INFO eid:103 Found repair files: ["067", "067.db"]
35871 Sep 22 23:22:14.594 TRCE incoming request, uri: /newextent/103/data, method: GET, req_id: 038f18a2-8a71-4da7-853d-ab8848aaa44e, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35872 Sep 22 23:22:14.595 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/103/data, method: GET, req_id: 038f18a2-8a71-4da7-853d-ab8848aaa44e, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35873 Sep 22 23:22:14.600 TRCE incoming request, uri: /newextent/103/db, method: GET, req_id: afd79e50-7d2b-40e7-8236-3811abe6ec64, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35874 Sep 22 23:22:14.600 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/103/db, method: GET, req_id: afd79e50-7d2b-40e7-8236-3811abe6ec64, remote_addr: 127.0.0.1:48311, local_addr: 127.0.0.1:52864, task: repair
35875 Sep 22 23:22:14.601 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/067.copy" to "/tmp/downstairs-zrMnlo6G/00/000/067.replace"
35876 Sep 22 23:22:14.601 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35877 Sep 22 23:22:14.602 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/067.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35878 Sep 22 23:22:14.602 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/067"
35879 Sep 22 23:22:14.602 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/067.db"
35880 Sep 22 23:22:14.602 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35881 Sep 22 23:22:14.602 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/067.replace" to "/tmp/downstairs-zrMnlo6G/00/000/067.completed"
35882 Sep 22 23:22:14.602 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35883 Sep 22 23:22:14.602 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35884 Sep 22 23:22:14.603 DEBG [0] It's time to notify for 22
35885 Sep 22 23:22:14.603 INFO Completion from [0] id:22 status:true
35886 Sep 22 23:22:14.603 INFO [23/752] Repair commands completed
35887 Sep 22 23:22:14.603 INFO Pop front: ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }, state: ClientData([New, New, New]) }
35888 Sep 22 23:22:14.603 INFO Sent repair work, now wait for resp
35889 Sep 22 23:22:14.603 INFO [0] received reconcile message
35890 Sep 22 23:22:14.603 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }, state: ClientData([InProgress, New, New]) }, : downstairs
35891 Sep 22 23:22:14.603 INFO [0] client ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }
35892 Sep 22 23:22:14.603 INFO [1] received reconcile message
35893 Sep 22 23:22:14.603 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35894 Sep 22 23:22:14.603 INFO [1] client ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }
35895 Sep 22 23:22:14.603 INFO [2] received reconcile message
35896 Sep 22 23:22:14.603 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35897 Sep 22 23:22:14.603 INFO [2] client ExtentReopen { repair_id: ReconciliationId(23), extent_id: 103 }
35898 Sep 22 23:22:14.603 DEBG 23 Reopen extent 103
35899 Sep 22 23:22:14.604 DEBG 23 Reopen extent 103
35900 Sep 22 23:22:14.604 DEBG 23 Reopen extent 103
35901 Sep 22 23:22:14.605 DEBG [2] It's time to notify for 23
35902 Sep 22 23:22:14.605 INFO Completion from [2] id:23 status:true
35903 Sep 22 23:22:14.605 INFO [24/752] Repair commands completed
35904 Sep 22 23:22:14.605 INFO Pop front: ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35905 Sep 22 23:22:14.605 INFO Sent repair work, now wait for resp
35906 Sep 22 23:22:14.605 INFO [0] received reconcile message
35907 Sep 22 23:22:14.605 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35908 Sep 22 23:22:14.605 INFO [0] client ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35909 Sep 22 23:22:14.605 INFO [1] received reconcile message
35910 Sep 22 23:22:14.605 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35911 Sep 22 23:22:14.605 INFO [1] client ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35912 Sep 22 23:22:14.605 INFO [2] received reconcile message
35913 Sep 22 23:22:14.605 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35914 Sep 22 23:22:14.605 INFO [2] client ExtentFlush { repair_id: ReconciliationId(24), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35915 Sep 22 23:22:14.605 DEBG 24 Flush extent 92 with f:2 g:2
35916 Sep 22 23:22:14.605 DEBG Flush just extent 92 with f:2 and g:2
35917 Sep 22 23:22:14.605 DEBG [1] It's time to notify for 24
35918 Sep 22 23:22:14.605 INFO Completion from [1] id:24 status:true
35919 Sep 22 23:22:14.605 INFO [25/752] Repair commands completed
35920 Sep 22 23:22:14.605 INFO Pop front: ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }, state: ClientData([New, New, New]) }
35921 Sep 22 23:22:14.605 INFO Sent repair work, now wait for resp
35922 Sep 22 23:22:14.605 INFO [0] received reconcile message
35923 Sep 22 23:22:14.605 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }, state: ClientData([InProgress, New, New]) }, : downstairs
35924 Sep 22 23:22:14.605 INFO [0] client ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }
35925 Sep 22 23:22:14.606 INFO [1] received reconcile message
35926 Sep 22 23:22:14.606 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35927 Sep 22 23:22:14.606 INFO [1] client ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }
35928 Sep 22 23:22:14.606 INFO [2] received reconcile message
35929 Sep 22 23:22:14.606 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35930 Sep 22 23:22:14.606 INFO [2] client ExtentClose { repair_id: ReconciliationId(25), extent_id: 92 }
35931 Sep 22 23:22:14.606 DEBG 25 Close extent 92
35932 Sep 22 23:22:14.606 DEBG 25 Close extent 92
35933 Sep 22 23:22:14.606 DEBG 25 Close extent 92
35934 Sep 22 23:22:14.607 DEBG [2] It's time to notify for 25
35935 Sep 22 23:22:14.607 INFO Completion from [2] id:25 status:true
35936 Sep 22 23:22:14.607 INFO [26/752] Repair commands completed
35937 Sep 22 23:22:14.607 INFO Pop front: ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35938 Sep 22 23:22:14.607 INFO Sent repair work, now wait for resp
35939 Sep 22 23:22:14.607 INFO [0] received reconcile message
35940 Sep 22 23:22:14.607 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35941 Sep 22 23:22:14.607 INFO [0] client ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35942 Sep 22 23:22:14.607 INFO [0] Sending repair request ReconciliationId(26)
35943 Sep 22 23:22:14.607 INFO [1] received reconcile message
35944 Sep 22 23:22:14.607 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35945 Sep 22 23:22:14.607 INFO [1] client ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35946 Sep 22 23:22:14.607 INFO [1] No action required ReconciliationId(26)
35947 Sep 22 23:22:14.607 INFO [2] received reconcile message
35948 Sep 22 23:22:14.607 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35949 Sep 22 23:22:14.607 INFO [2] client ExtentRepair { repair_id: ReconciliationId(26), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
35950 Sep 22 23:22:14.607 INFO [2] No action required ReconciliationId(26)
35951 Sep 22 23:22:14.607 DEBG 26 Repair extent 92 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
35952 Sep 22 23:22:14.607 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/05C.copy"
35953 Sep 22 23:22:14.671 INFO accepted connection, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35954 Sep 22 23:22:14.671 TRCE incoming request, uri: /extent/92/files, method: GET, req_id: 620a61dd-2d27-48e0-b04f-846e91ee0976, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35955 Sep 22 23:22:14.671 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/92/files, method: GET, req_id: 620a61dd-2d27-48e0-b04f-846e91ee0976, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35956 Sep 22 23:22:14.671 INFO eid:92 Found repair files: ["05C", "05C.db"]
35957 Sep 22 23:22:14.672 TRCE incoming request, uri: /newextent/92/data, method: GET, req_id: 1663aa58-ba20-4776-9956-e0571654aa78, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35958 Sep 22 23:22:14.672 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/92/data, method: GET, req_id: 1663aa58-ba20-4776-9956-e0571654aa78, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35959 Sep 22 23:22:14.677 TRCE incoming request, uri: /newextent/92/db, method: GET, req_id: 7d6f3054-7044-4270-bd00-f78c6fe8e527, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35960 Sep 22 23:22:14.677 INFO request completed, latency_us: 287, response_code: 200, uri: /newextent/92/db, method: GET, req_id: 7d6f3054-7044-4270-bd00-f78c6fe8e527, remote_addr: 127.0.0.1:50918, local_addr: 127.0.0.1:52864, task: repair
35961 Sep 22 23:22:14.678 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/05C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/05C.replace"
35962 Sep 22 23:22:14.679 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35963 Sep 22 23:22:14.679 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/05C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
35964 Sep 22 23:22:14.680 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05C"
35965 Sep 22 23:22:14.680 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05C.db"
35966 Sep 22 23:22:14.680 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35967 Sep 22 23:22:14.680 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/05C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/05C.completed"
35968 Sep 22 23:22:14.680 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35969 Sep 22 23:22:14.680 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
35970 Sep 22 23:22:14.680 DEBG [0] It's time to notify for 26
35971 Sep 22 23:22:14.680 INFO Completion from [0] id:26 status:true
35972 Sep 22 23:22:14.680 INFO [27/752] Repair commands completed
35973 Sep 22 23:22:14.680 INFO Pop front: ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }, state: ClientData([New, New, New]) }
35974 Sep 22 23:22:14.680 INFO Sent repair work, now wait for resp
35975 Sep 22 23:22:14.680 INFO [0] received reconcile message
35976 Sep 22 23:22:14.680 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }, state: ClientData([InProgress, New, New]) }, : downstairs
35977 Sep 22 23:22:14.680 INFO [0] client ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }
35978 Sep 22 23:22:14.680 INFO [1] received reconcile message
35979 Sep 22 23:22:14.680 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35980 Sep 22 23:22:14.680 INFO [1] client ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }
35981 Sep 22 23:22:14.680 INFO [2] received reconcile message
35982 Sep 22 23:22:14.680 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35983 Sep 22 23:22:14.680 INFO [2] client ExtentReopen { repair_id: ReconciliationId(27), extent_id: 92 }
35984 Sep 22 23:22:14.680 DEBG 27 Reopen extent 92
35985 Sep 22 23:22:14.681 DEBG 27 Reopen extent 92
35986 Sep 22 23:22:14.682 DEBG 27 Reopen extent 92
35987 Sep 22 23:22:14.682 DEBG [2] It's time to notify for 27
35988 Sep 22 23:22:14.682 INFO Completion from [2] id:27 status:true
35989 Sep 22 23:22:14.682 INFO [28/752] Repair commands completed
35990 Sep 22 23:22:14.682 INFO Pop front: ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35991 Sep 22 23:22:14.682 INFO Sent repair work, now wait for resp
35992 Sep 22 23:22:14.682 INFO [0] received reconcile message
35993 Sep 22 23:22:14.682 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35994 Sep 22 23:22:14.682 INFO [0] client ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35995 Sep 22 23:22:14.682 INFO [1] received reconcile message
35996 Sep 22 23:22:14.682 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35997 Sep 22 23:22:14.682 INFO [1] client ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35998 Sep 22 23:22:14.682 INFO [2] received reconcile message
35999 Sep 22 23:22:14.682 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36000 Sep 22 23:22:14.682 INFO [2] client ExtentFlush { repair_id: ReconciliationId(28), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36001 Sep 22 23:22:14.682 DEBG 28 Flush extent 129 with f:2 g:2
36002 Sep 22 23:22:14.683 DEBG Flush just extent 129 with f:2 and g:2
36003 Sep 22 23:22:14.683 DEBG [1] It's time to notify for 28
36004 Sep 22 23:22:14.683 INFO Completion from [1] id:28 status:true
36005 Sep 22 23:22:14.683 INFO [29/752] Repair commands completed
36006 Sep 22 23:22:14.683 INFO Pop front: ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }, state: ClientData([New, New, New]) }
36007 Sep 22 23:22:14.683 INFO Sent repair work, now wait for resp
36008 Sep 22 23:22:14.683 INFO [0] received reconcile message
36009 Sep 22 23:22:14.683 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }, state: ClientData([InProgress, New, New]) }, : downstairs
36010 Sep 22 23:22:14.683 INFO [0] client ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }
36011 Sep 22 23:22:14.683 INFO [1] received reconcile message
36012 Sep 22 23:22:14.683 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36013 Sep 22 23:22:14.683 INFO [1] client ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }
36014 Sep 22 23:22:14.683 INFO [2] received reconcile message
36015 Sep 22 23:22:14.683 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36016 Sep 22 23:22:14.683 INFO [2] client ExtentClose { repair_id: ReconciliationId(29), extent_id: 129 }
36017 Sep 22 23:22:14.683 DEBG 29 Close extent 129
36018 Sep 22 23:22:14.683 DEBG 29 Close extent 129
36019 Sep 22 23:22:14.684 DEBG 29 Close extent 129
36020 Sep 22 23:22:14.684 DEBG [2] It's time to notify for 29
36021 Sep 22 23:22:14.684 INFO Completion from [2] id:29 status:true
36022 Sep 22 23:22:14.684 INFO [30/752] Repair commands completed
36023 Sep 22 23:22:14.684 INFO Pop front: ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36024 Sep 22 23:22:14.684 INFO Sent repair work, now wait for resp
36025 Sep 22 23:22:14.684 INFO [0] received reconcile message
36026 Sep 22 23:22:14.684 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36027 Sep 22 23:22:14.684 INFO [0] client ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36028 Sep 22 23:22:14.684 INFO [0] Sending repair request ReconciliationId(30)
36029 Sep 22 23:22:14.684 INFO [1] received reconcile message
36030 Sep 22 23:22:14.684 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36031 Sep 22 23:22:14.684 INFO [1] client ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36032 Sep 22 23:22:14.684 INFO [1] No action required ReconciliationId(30)
36033 Sep 22 23:22:14.684 INFO [2] received reconcile message
36034 Sep 22 23:22:14.684 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36035 Sep 22 23:22:14.684 INFO [2] client ExtentRepair { repair_id: ReconciliationId(30), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36036 Sep 22 23:22:14.684 INFO [2] No action required ReconciliationId(30)
36037 Sep 22 23:22:14.684 DEBG 30 Repair extent 129 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36038 Sep 22 23:22:14.685 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/081.copy"
36039 Sep 22 23:22:14.705 DEBG up_ds_listen was notified
36040 Sep 22 23:22:14.705 DEBG up_ds_listen process 1072
36041 Sep 22 23:22:14.705 DEBG [A] ack job 1072:73, : downstairs
36042 Sep 22 23:22:14.705 DEBG up_ds_listen checked 1 jobs, back to waiting
36043 Sep 22 23:22:14.705 DEBG IO Flush 1074 has deps [JobId(1073), JobId(1072)]
36044 Sep 22 23:22:14.705 WARN returning error on read!
36045 Sep 22 23:22:14.705 DEBG Read :1073 deps:[JobId(1072)] res:false
36046 Sep 22 23:22:14.711 DEBG Read :1073 deps:[JobId(1072)] res:true
36047 Sep 22 23:22:14.733 ERRO [0] job id 1073 saw error GenericError("test error")
36048 Sep 22 23:22:14.735 DEBG Flush :1074 extent_limit None deps:[JobId(1073), JobId(1072)] res:true f:26 g:1
36049 Sep 22 23:22:14.735 INFO [lossy] sleeping 1 second
36050 Sep 22 23:22:14.748 INFO accepted connection, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36051 Sep 22 23:22:14.749 TRCE incoming request, uri: /extent/129/files, method: GET, req_id: 7f3661a4-53bd-4465-8f89-127d245e5b6e, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36052 Sep 22 23:22:14.749 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/129/files, method: GET, req_id: 7f3661a4-53bd-4465-8f89-127d245e5b6e, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36053 Sep 22 23:22:14.749 INFO eid:129 Found repair files: ["081", "081.db"]
36054 Sep 22 23:22:14.749 TRCE incoming request, uri: /newextent/129/data, method: GET, req_id: 867aca05-4a93-4317-bb7a-b02e9e74a39e, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36055 Sep 22 23:22:14.750 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/129/data, method: GET, req_id: 867aca05-4a93-4317-bb7a-b02e9e74a39e, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36056 Sep 22 23:22:14.755 TRCE incoming request, uri: /newextent/129/db, method: GET, req_id: 450b3e0d-5671-440a-a2a5-ecde4274eb0e, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36057 Sep 22 23:22:14.755 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/129/db, method: GET, req_id: 450b3e0d-5671-440a-a2a5-ecde4274eb0e, remote_addr: 127.0.0.1:65069, local_addr: 127.0.0.1:52864, task: repair
36058 Sep 22 23:22:14.756 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/081.copy" to "/tmp/downstairs-zrMnlo6G/00/000/081.replace"
36059 Sep 22 23:22:14.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36060 Sep 22 23:22:14.757 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/081.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36061 Sep 22 23:22:14.757 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/081"
36062 Sep 22 23:22:14.757 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/081.db"
36063 Sep 22 23:22:14.757 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36064 Sep 22 23:22:14.757 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/081.replace" to "/tmp/downstairs-zrMnlo6G/00/000/081.completed"
36065 Sep 22 23:22:14.757 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36066 Sep 22 23:22:14.757 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36067 Sep 22 23:22:14.758 DEBG [0] It's time to notify for 30
36068 Sep 22 23:22:14.758 INFO Completion from [0] id:30 status:true
36069 Sep 22 23:22:14.758 INFO [31/752] Repair commands completed
36070 Sep 22 23:22:14.758 INFO Pop front: ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }, state: ClientData([New, New, New]) }
36071 Sep 22 23:22:14.758 INFO Sent repair work, now wait for resp
36072 Sep 22 23:22:14.758 INFO [0] received reconcile message
36073 Sep 22 23:22:14.758 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }, state: ClientData([InProgress, New, New]) }, : downstairs
36074 Sep 22 23:22:14.758 INFO [0] client ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }
36075 Sep 22 23:22:14.758 INFO [1] received reconcile message
36076 Sep 22 23:22:14.758 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36077 Sep 22 23:22:14.758 INFO [1] client ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }
36078 Sep 22 23:22:14.758 INFO [2] received reconcile message
36079 Sep 22 23:22:14.758 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36080 Sep 22 23:22:14.758 INFO [2] client ExtentReopen { repair_id: ReconciliationId(31), extent_id: 129 }
36081 Sep 22 23:22:14.758 DEBG 31 Reopen extent 129
36082 Sep 22 23:22:14.759 DEBG 31 Reopen extent 129
36083 Sep 22 23:22:14.759 DEBG 31 Reopen extent 129
36084 Sep 22 23:22:14.760 DEBG [2] It's time to notify for 31
36085 Sep 22 23:22:14.760 INFO Completion from [2] id:31 status:true
36086 Sep 22 23:22:14.760 INFO [32/752] Repair commands completed
36087 Sep 22 23:22:14.760 INFO Pop front: ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36088 Sep 22 23:22:14.760 INFO Sent repair work, now wait for resp
36089 Sep 22 23:22:14.760 INFO [0] received reconcile message
36090 Sep 22 23:22:14.760 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36091 Sep 22 23:22:14.760 INFO [0] client ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36092 Sep 22 23:22:14.760 INFO [1] received reconcile message
36093 Sep 22 23:22:14.760 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36094 Sep 22 23:22:14.760 INFO [1] client ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36095 Sep 22 23:22:14.760 INFO [2] received reconcile message
36096 Sep 22 23:22:14.760 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36097 Sep 22 23:22:14.760 INFO [2] client ExtentFlush { repair_id: ReconciliationId(32), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36098 Sep 22 23:22:14.760 DEBG 32 Flush extent 132 with f:2 g:2
36099 Sep 22 23:22:14.760 DEBG Flush just extent 132 with f:2 and g:2
36100 Sep 22 23:22:14.760 DEBG [1] It's time to notify for 32
36101 Sep 22 23:22:14.760 INFO Completion from [1] id:32 status:true
36102 Sep 22 23:22:14.760 INFO [33/752] Repair commands completed
36103 Sep 22 23:22:14.760 INFO Pop front: ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }, state: ClientData([New, New, New]) }
36104 Sep 22 23:22:14.760 INFO Sent repair work, now wait for resp
36105 Sep 22 23:22:14.760 INFO [0] received reconcile message
36106 Sep 22 23:22:14.760 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }, state: ClientData([InProgress, New, New]) }, : downstairs
36107 Sep 22 23:22:14.760 INFO [0] client ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }
36108 Sep 22 23:22:14.761 INFO [1] received reconcile message
36109 Sep 22 23:22:14.761 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36110 Sep 22 23:22:14.761 INFO [1] client ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }
36111 Sep 22 23:22:14.761 INFO [2] received reconcile message
36112 Sep 22 23:22:14.761 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36113 Sep 22 23:22:14.761 INFO [2] client ExtentClose { repair_id: ReconciliationId(33), extent_id: 132 }
36114 Sep 22 23:22:14.761 DEBG 33 Close extent 132
36115 Sep 22 23:22:14.761 DEBG 33 Close extent 132
36116 Sep 22 23:22:14.761 DEBG 33 Close extent 132
36117 Sep 22 23:22:14.762 DEBG [2] It's time to notify for 33
36118 Sep 22 23:22:14.762 INFO Completion from [2] id:33 status:true
36119 Sep 22 23:22:14.762 INFO [34/752] Repair commands completed
36120 Sep 22 23:22:14.762 INFO Pop front: ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36121 Sep 22 23:22:14.762 INFO Sent repair work, now wait for resp
36122 Sep 22 23:22:14.762 INFO [0] received reconcile message
36123 Sep 22 23:22:14.762 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36124 Sep 22 23:22:14.762 INFO [0] client ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36125 Sep 22 23:22:14.762 INFO [0] Sending repair request ReconciliationId(34)
36126 Sep 22 23:22:14.762 INFO [1] received reconcile message
36127 Sep 22 23:22:14.762 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36128 Sep 22 23:22:14.762 INFO [1] client ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36129 Sep 22 23:22:14.762 INFO [1] No action required ReconciliationId(34)
36130 Sep 22 23:22:14.762 INFO [2] received reconcile message
36131 Sep 22 23:22:14.762 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36132 Sep 22 23:22:14.762 INFO [2] client ExtentRepair { repair_id: ReconciliationId(34), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36133 Sep 22 23:22:14.762 INFO [2] No action required ReconciliationId(34)
36134 Sep 22 23:22:14.762 DEBG 34 Repair extent 132 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36135 Sep 22 23:22:14.762 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/084.copy"
36136 Sep 22 23:22:14.826 INFO accepted connection, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36137 Sep 22 23:22:14.826 TRCE incoming request, uri: /extent/132/files, method: GET, req_id: 2e4c41b8-7d13-464c-b488-39823517979d, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36138 Sep 22 23:22:14.826 INFO request completed, latency_us: 211, response_code: 200, uri: /extent/132/files, method: GET, req_id: 2e4c41b8-7d13-464c-b488-39823517979d, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36139 Sep 22 23:22:14.826 INFO eid:132 Found repair files: ["084", "084.db"]
36140 Sep 22 23:22:14.827 TRCE incoming request, uri: /newextent/132/data, method: GET, req_id: df772583-ee58-4a3b-bd82-f2580c4d8093, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36141 Sep 22 23:22:14.827 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/132/data, method: GET, req_id: df772583-ee58-4a3b-bd82-f2580c4d8093, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36142 Sep 22 23:22:14.832 TRCE incoming request, uri: /newextent/132/db, method: GET, req_id: dd0bedee-1108-41b3-8516-4e7c351ee3e0, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36143 Sep 22 23:22:14.832 INFO request completed, latency_us: 289, response_code: 200, uri: /newextent/132/db, method: GET, req_id: dd0bedee-1108-41b3-8516-4e7c351ee3e0, remote_addr: 127.0.0.1:47553, local_addr: 127.0.0.1:52864, task: repair
36144 Sep 22 23:22:14.833 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/084.copy" to "/tmp/downstairs-zrMnlo6G/00/000/084.replace"
36145 Sep 22 23:22:14.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36146 Sep 22 23:22:14.834 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/084.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36147 Sep 22 23:22:14.835 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/084"
36148 Sep 22 23:22:14.835 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/084.db"
36149 Sep 22 23:22:14.835 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36150 Sep 22 23:22:14.835 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/084.replace" to "/tmp/downstairs-zrMnlo6G/00/000/084.completed"
36151 Sep 22 23:22:14.835 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36152 Sep 22 23:22:14.835 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36153 Sep 22 23:22:14.835 DEBG [0] It's time to notify for 34
36154 Sep 22 23:22:14.835 INFO Completion from [0] id:34 status:true
36155 Sep 22 23:22:14.835 INFO [35/752] Repair commands completed
36156 Sep 22 23:22:14.835 INFO Pop front: ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }, state: ClientData([New, New, New]) }
36157 Sep 22 23:22:14.835 INFO Sent repair work, now wait for resp
36158 Sep 22 23:22:14.835 INFO [0] received reconcile message
36159 Sep 22 23:22:14.835 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }, state: ClientData([InProgress, New, New]) }, : downstairs
36160 Sep 22 23:22:14.835 INFO [0] client ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }
36161 Sep 22 23:22:14.835 INFO [1] received reconcile message
36162 Sep 22 23:22:14.835 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36163 Sep 22 23:22:14.835 INFO [1] client ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }
36164 Sep 22 23:22:14.835 INFO [2] received reconcile message
36165 Sep 22 23:22:14.835 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36166 Sep 22 23:22:14.835 INFO [2] client ExtentReopen { repair_id: ReconciliationId(35), extent_id: 132 }
36167 Sep 22 23:22:14.836 DEBG 35 Reopen extent 132
36168 Sep 22 23:22:14.836 DEBG 35 Reopen extent 132
36169 Sep 22 23:22:14.837 DEBG 35 Reopen extent 132
36170 Sep 22 23:22:14.837 DEBG [2] It's time to notify for 35
36171 Sep 22 23:22:14.837 INFO Completion from [2] id:35 status:true
36172 Sep 22 23:22:14.837 INFO [36/752] Repair commands completed
36173 Sep 22 23:22:14.837 INFO Pop front: ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36174 Sep 22 23:22:14.837 INFO Sent repair work, now wait for resp
36175 Sep 22 23:22:14.837 INFO [0] received reconcile message
36176 Sep 22 23:22:14.837 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36177 Sep 22 23:22:14.837 INFO [0] client ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36178 Sep 22 23:22:14.838 INFO [1] received reconcile message
36179 Sep 22 23:22:14.838 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36180 Sep 22 23:22:14.838 INFO [1] client ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36181 Sep 22 23:22:14.838 INFO [2] received reconcile message
36182 Sep 22 23:22:14.838 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36183 Sep 22 23:22:14.838 INFO [2] client ExtentFlush { repair_id: ReconciliationId(36), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36184 Sep 22 23:22:14.838 DEBG 36 Flush extent 138 with f:2 g:2
36185 Sep 22 23:22:14.838 DEBG Flush just extent 138 with f:2 and g:2
36186 Sep 22 23:22:14.838 DEBG [1] It's time to notify for 36
36187 Sep 22 23:22:14.838 INFO Completion from [1] id:36 status:true
36188 Sep 22 23:22:14.838 INFO [37/752] Repair commands completed
36189 Sep 22 23:22:14.838 INFO Pop front: ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }, state: ClientData([New, New, New]) }
36190 Sep 22 23:22:14.838 INFO Sent repair work, now wait for resp
36191 Sep 22 23:22:14.838 INFO [0] received reconcile message
36192 Sep 22 23:22:14.838 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }, state: ClientData([InProgress, New, New]) }, : downstairs
36193 Sep 22 23:22:14.838 INFO [0] client ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }
36194 Sep 22 23:22:14.838 INFO [1] received reconcile message
36195 Sep 22 23:22:14.838 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36196 Sep 22 23:22:14.838 INFO [1] client ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }
36197 Sep 22 23:22:14.838 INFO [2] received reconcile message
36198 Sep 22 23:22:14.838 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36199 Sep 22 23:22:14.838 INFO [2] client ExtentClose { repair_id: ReconciliationId(37), extent_id: 138 }
36200 Sep 22 23:22:14.838 DEBG 37 Close extent 138
36201 Sep 22 23:22:14.839 DEBG 37 Close extent 138
36202 Sep 22 23:22:14.839 DEBG 37 Close extent 138
36203 Sep 22 23:22:14.839 DEBG [2] It's time to notify for 37
36204 Sep 22 23:22:14.839 INFO Completion from [2] id:37 status:true
36205 Sep 22 23:22:14.839 INFO [38/752] Repair commands completed
36206 Sep 22 23:22:14.839 INFO Pop front: ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36207 Sep 22 23:22:14.839 INFO Sent repair work, now wait for resp
36208 Sep 22 23:22:14.839 INFO [0] received reconcile message
36209 Sep 22 23:22:14.839 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36210 Sep 22 23:22:14.839 INFO [0] client ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36211 Sep 22 23:22:14.839 INFO [0] Sending repair request ReconciliationId(38)
36212 Sep 22 23:22:14.840 INFO [1] received reconcile message
36213 Sep 22 23:22:14.840 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36214 Sep 22 23:22:14.840 INFO [1] client ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36215 Sep 22 23:22:14.840 INFO [1] No action required ReconciliationId(38)
36216 Sep 22 23:22:14.840 INFO [2] received reconcile message
36217 Sep 22 23:22:14.840 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36218 Sep 22 23:22:14.840 INFO [2] client ExtentRepair { repair_id: ReconciliationId(38), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36219 Sep 22 23:22:14.840 INFO [2] No action required ReconciliationId(38)
36220 Sep 22 23:22:14.840 DEBG 38 Repair extent 138 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36221 Sep 22 23:22:14.840 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/08A.copy"
36222 Sep 22 23:22:14.903 INFO accepted connection, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36223 Sep 22 23:22:14.903 TRCE incoming request, uri: /extent/138/files, method: GET, req_id: 82e356fb-170d-41e3-90b8-0c3335940202, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36224 Sep 22 23:22:14.903 INFO request completed, latency_us: 195, response_code: 200, uri: /extent/138/files, method: GET, req_id: 82e356fb-170d-41e3-90b8-0c3335940202, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36225 Sep 22 23:22:14.904 INFO eid:138 Found repair files: ["08A", "08A.db"]
36226 Sep 22 23:22:14.904 TRCE incoming request, uri: /newextent/138/data, method: GET, req_id: 23032a33-1dfc-4476-8b8b-3d20ae4b259f, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36227 Sep 22 23:22:14.904 INFO request completed, latency_us: 313, response_code: 200, uri: /newextent/138/data, method: GET, req_id: 23032a33-1dfc-4476-8b8b-3d20ae4b259f, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36228 Sep 22 23:22:14.909 TRCE incoming request, uri: /newextent/138/db, method: GET, req_id: 859b53c4-b38e-4f40-a8b4-6e1506c81b5d, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36229 Sep 22 23:22:14.910 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/138/db, method: GET, req_id: 859b53c4-b38e-4f40-a8b4-6e1506c81b5d, remote_addr: 127.0.0.1:45746, local_addr: 127.0.0.1:52864, task: repair
36230 Sep 22 23:22:14.911 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/08A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/08A.replace"
36231 Sep 22 23:22:14.911 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36232 Sep 22 23:22:14.912 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/08A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36233 Sep 22 23:22:14.912 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08A"
36234 Sep 22 23:22:14.912 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08A.db"
36235 Sep 22 23:22:14.912 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36236 Sep 22 23:22:14.912 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/08A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/08A.completed"
36237 Sep 22 23:22:14.912 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36238 Sep 22 23:22:14.912 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36239 Sep 22 23:22:14.912 DEBG [0] It's time to notify for 38
36240 Sep 22 23:22:14.912 INFO Completion from [0] id:38 status:true
36241 Sep 22 23:22:14.912 INFO [39/752] Repair commands completed
36242 Sep 22 23:22:14.912 INFO Pop front: ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }, state: ClientData([New, New, New]) }
36243 Sep 22 23:22:14.913 INFO Sent repair work, now wait for resp
36244 Sep 22 23:22:14.913 INFO [0] received reconcile message
36245 Sep 22 23:22:14.913 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }, state: ClientData([InProgress, New, New]) }, : downstairs
36246 Sep 22 23:22:14.913 INFO [0] client ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }
36247 Sep 22 23:22:14.913 INFO [1] received reconcile message
36248 Sep 22 23:22:14.913 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36249 Sep 22 23:22:14.913 INFO [1] client ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }
36250 Sep 22 23:22:14.913 INFO [2] received reconcile message
36251 Sep 22 23:22:14.913 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36252 Sep 22 23:22:14.913 INFO [2] client ExtentReopen { repair_id: ReconciliationId(39), extent_id: 138 }
36253 Sep 22 23:22:14.913 DEBG 39 Reopen extent 138
36254 Sep 22 23:22:14.913 DEBG 39 Reopen extent 138
36255 Sep 22 23:22:14.914 DEBG 39 Reopen extent 138
36256 Sep 22 23:22:14.914 DEBG [2] It's time to notify for 39
36257 Sep 22 23:22:14.915 INFO Completion from [2] id:39 status:true
36258 Sep 22 23:22:14.915 INFO [40/752] Repair commands completed
36259 Sep 22 23:22:14.915 INFO Pop front: ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36260 Sep 22 23:22:14.915 INFO Sent repair work, now wait for resp
36261 Sep 22 23:22:14.915 INFO [0] received reconcile message
36262 Sep 22 23:22:14.915 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36263 Sep 22 23:22:14.915 INFO [0] client ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36264 Sep 22 23:22:14.915 INFO [1] received reconcile message
36265 Sep 22 23:22:14.915 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36266 Sep 22 23:22:14.915 INFO [1] client ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36267 Sep 22 23:22:14.915 INFO [2] received reconcile message
36268 Sep 22 23:22:14.915 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36269 Sep 22 23:22:14.915 INFO [2] client ExtentFlush { repair_id: ReconciliationId(40), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36270 Sep 22 23:22:14.915 DEBG 40 Flush extent 147 with f:2 g:2
36271 Sep 22 23:22:14.915 DEBG Flush just extent 147 with f:2 and g:2
36272 Sep 22 23:22:14.915 DEBG [1] It's time to notify for 40
36273 Sep 22 23:22:14.915 INFO Completion from [1] id:40 status:true
36274 Sep 22 23:22:14.915 INFO [41/752] Repair commands completed
36275 Sep 22 23:22:14.915 INFO Pop front: ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }, state: ClientData([New, New, New]) }
36276 Sep 22 23:22:14.915 INFO Sent repair work, now wait for resp
36277 Sep 22 23:22:14.915 INFO [0] received reconcile message
36278 Sep 22 23:22:14.915 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }, state: ClientData([InProgress, New, New]) }, : downstairs
36279 Sep 22 23:22:14.915 INFO [0] client ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }
36280 Sep 22 23:22:14.915 INFO [1] received reconcile message
36281 Sep 22 23:22:14.915 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36282 Sep 22 23:22:14.915 INFO [1] client ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }
36283 Sep 22 23:22:14.915 INFO [2] received reconcile message
36284 Sep 22 23:22:14.915 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36285 Sep 22 23:22:14.915 INFO [2] client ExtentClose { repair_id: ReconciliationId(41), extent_id: 147 }
36286 Sep 22 23:22:14.916 DEBG 41 Close extent 147
36287 Sep 22 23:22:14.916 DEBG 41 Close extent 147
36288 Sep 22 23:22:14.916 DEBG 41 Close extent 147
36289 Sep 22 23:22:14.916 DEBG [2] It's time to notify for 41
36290 Sep 22 23:22:14.916 INFO Completion from [2] id:41 status:true
36291 Sep 22 23:22:14.916 INFO [42/752] Repair commands completed
36292 Sep 22 23:22:14.917 INFO Pop front: ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36293 Sep 22 23:22:14.917 INFO Sent repair work, now wait for resp
36294 Sep 22 23:22:14.917 INFO [0] received reconcile message
36295 Sep 22 23:22:14.917 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36296 Sep 22 23:22:14.917 INFO [0] client ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36297 Sep 22 23:22:14.917 INFO [0] Sending repair request ReconciliationId(42)
36298 Sep 22 23:22:14.917 INFO [1] received reconcile message
36299 Sep 22 23:22:14.917 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36300 Sep 22 23:22:14.917 INFO [1] client ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36301 Sep 22 23:22:14.917 INFO [1] No action required ReconciliationId(42)
36302 Sep 22 23:22:14.917 INFO [2] received reconcile message
36303 Sep 22 23:22:14.917 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36304 Sep 22 23:22:14.917 INFO [2] client ExtentRepair { repair_id: ReconciliationId(42), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36305 Sep 22 23:22:14.917 INFO [2] No action required ReconciliationId(42)
36306 Sep 22 23:22:14.917 DEBG 42 Repair extent 147 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36307 Sep 22 23:22:14.917 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/093.copy"
36308 Sep 22 23:22:14.982 INFO accepted connection, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36309 Sep 22 23:22:14.982 TRCE incoming request, uri: /extent/147/files, method: GET, req_id: 53ef9fd5-b327-4d2b-ac4f-2f06221d81cc, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36310 Sep 22 23:22:14.982 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/147/files, method: GET, req_id: 53ef9fd5-b327-4d2b-ac4f-2f06221d81cc, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36311 Sep 22 23:22:14.982 INFO eid:147 Found repair files: ["093", "093.db"]
36312 Sep 22 23:22:14.983 TRCE incoming request, uri: /newextent/147/data, method: GET, req_id: 893f21d5-41de-41d6-8990-099df3e5d8c3, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36313 Sep 22 23:22:14.983 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/147/data, method: GET, req_id: 893f21d5-41de-41d6-8990-099df3e5d8c3, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36314 Sep 22 23:22:14.988 TRCE incoming request, uri: /newextent/147/db, method: GET, req_id: 1a9d2449-9c49-4b0c-ac50-95e955c41522, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36315 Sep 22 23:22:14.988 INFO request completed, latency_us: 286, response_code: 200, uri: /newextent/147/db, method: GET, req_id: 1a9d2449-9c49-4b0c-ac50-95e955c41522, remote_addr: 127.0.0.1:59665, local_addr: 127.0.0.1:52864, task: repair
36316 Sep 22 23:22:14.990 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/093.copy" to "/tmp/downstairs-zrMnlo6G/00/000/093.replace"
36317 Sep 22 23:22:14.990 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36318 Sep 22 23:22:14.990 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/093.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36319 Sep 22 23:22:14.991 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/093"
36320 Sep 22 23:22:14.991 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/093.db"
36321 Sep 22 23:22:14.991 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36322 Sep 22 23:22:14.991 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/093.replace" to "/tmp/downstairs-zrMnlo6G/00/000/093.completed"
36323 Sep 22 23:22:14.991 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36324 Sep 22 23:22:14.991 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36325 Sep 22 23:22:14.991 DEBG [0] It's time to notify for 42
36326 Sep 22 23:22:14.991 INFO Completion from [0] id:42 status:true
36327 Sep 22 23:22:14.991 INFO [43/752] Repair commands completed
36328 Sep 22 23:22:14.991 INFO Pop front: ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }, state: ClientData([New, New, New]) }
36329 Sep 22 23:22:14.991 INFO Sent repair work, now wait for resp
36330 Sep 22 23:22:14.991 INFO [0] received reconcile message
36331 Sep 22 23:22:14.991 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }, state: ClientData([InProgress, New, New]) }, : downstairs
36332 Sep 22 23:22:14.991 INFO [0] client ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }
36333 Sep 22 23:22:14.991 INFO [1] received reconcile message
36334 Sep 22 23:22:14.991 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36335 Sep 22 23:22:14.991 INFO [1] client ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }
36336 Sep 22 23:22:14.991 INFO [2] received reconcile message
36337 Sep 22 23:22:14.991 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36338 Sep 22 23:22:14.991 INFO [2] client ExtentReopen { repair_id: ReconciliationId(43), extent_id: 147 }
36339 Sep 22 23:22:14.992 DEBG 43 Reopen extent 147
36340 Sep 22 23:22:14.992 DEBG 43 Reopen extent 147
36341 Sep 22 23:22:14.993 DEBG 43 Reopen extent 147
36342 Sep 22 23:22:14.993 DEBG [2] It's time to notify for 43
36343 Sep 22 23:22:14.993 INFO Completion from [2] id:43 status:true
36344 Sep 22 23:22:14.993 INFO [44/752] Repair commands completed
36345 Sep 22 23:22:14.993 INFO Pop front: ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36346 Sep 22 23:22:14.993 INFO Sent repair work, now wait for resp
36347 Sep 22 23:22:14.993 INFO [0] received reconcile message
36348 Sep 22 23:22:14.993 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36349 Sep 22 23:22:14.993 INFO [0] client ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36350 Sep 22 23:22:14.993 INFO [1] received reconcile message
36351 Sep 22 23:22:14.993 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36352 Sep 22 23:22:14.993 INFO [1] client ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36353 Sep 22 23:22:14.993 INFO [2] received reconcile message
36354 Sep 22 23:22:14.993 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36355 Sep 22 23:22:14.993 INFO [2] client ExtentFlush { repair_id: ReconciliationId(44), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36356 Sep 22 23:22:14.994 DEBG 44 Flush extent 67 with f:2 g:2
36357 Sep 22 23:22:14.994 DEBG Flush just extent 67 with f:2 and g:2
36358 Sep 22 23:22:14.994 DEBG [1] It's time to notify for 44
36359 Sep 22 23:22:14.994 INFO Completion from [1] id:44 status:true
36360 Sep 22 23:22:14.994 INFO [45/752] Repair commands completed
36361 Sep 22 23:22:14.994 INFO Pop front: ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }, state: ClientData([New, New, New]) }
36362 Sep 22 23:22:14.994 INFO Sent repair work, now wait for resp
36363 Sep 22 23:22:14.994 INFO [0] received reconcile message
36364 Sep 22 23:22:14.994 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }, state: ClientData([InProgress, New, New]) }, : downstairs
36365 Sep 22 23:22:14.994 INFO [0] client ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }
36366 Sep 22 23:22:14.994 INFO [1] received reconcile message
36367 Sep 22 23:22:14.994 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36368 Sep 22 23:22:14.994 INFO [1] client ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }
36369 Sep 22 23:22:14.994 INFO [2] received reconcile message
36370 Sep 22 23:22:14.994 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36371 Sep 22 23:22:14.994 INFO [2] client ExtentClose { repair_id: ReconciliationId(45), extent_id: 67 }
36372 Sep 22 23:22:14.994 DEBG 45 Close extent 67
36373 Sep 22 23:22:14.994 DEBG 45 Close extent 67
36374 Sep 22 23:22:14.995 DEBG 45 Close extent 67
36375 Sep 22 23:22:14.995 DEBG [2] It's time to notify for 45
36376 Sep 22 23:22:14.995 INFO Completion from [2] id:45 status:true
36377 Sep 22 23:22:14.995 INFO [46/752] Repair commands completed
36378 Sep 22 23:22:14.995 INFO Pop front: ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36379 Sep 22 23:22:14.995 INFO Sent repair work, now wait for resp
36380 Sep 22 23:22:14.995 INFO [0] received reconcile message
36381 Sep 22 23:22:14.995 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36382 Sep 22 23:22:14.995 INFO [0] client ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36383 Sep 22 23:22:14.995 INFO [0] Sending repair request ReconciliationId(46)
36384 Sep 22 23:22:14.995 INFO [1] received reconcile message
36385 Sep 22 23:22:14.995 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36386 Sep 22 23:22:14.995 INFO [1] client ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36387 Sep 22 23:22:14.995 INFO [1] No action required ReconciliationId(46)
36388 Sep 22 23:22:14.995 INFO [2] received reconcile message
36389 Sep 22 23:22:14.995 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36390 Sep 22 23:22:14.995 INFO [2] client ExtentRepair { repair_id: ReconciliationId(46), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36391 Sep 22 23:22:14.995 INFO [2] No action required ReconciliationId(46)
36392 Sep 22 23:22:14.996 DEBG 46 Repair extent 67 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36393 Sep 22 23:22:14.996 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/043.copy"
36394 Sep 22 23:22:15.060 INFO accepted connection, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36395 Sep 22 23:22:15.060 TRCE incoming request, uri: /extent/67/files, method: GET, req_id: 3d0c8210-afa1-48c4-8e18-c4a6b56d50c5, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36396 Sep 22 23:22:15.060 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 0
36397 Sep 22 23:22:15.060 INFO [0] Proc runs for 127.0.0.1:42129 in state Replaced
36398 Sep 22 23:22:15.060 INFO accepted connection from 127.0.0.1:33363, task: main
36399 Sep 22 23:22:15.060 INFO request completed, latency_us: 400, response_code: 200, uri: /extent/67/files, method: GET, req_id: 3d0c8210-afa1-48c4-8e18-c4a6b56d50c5, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36400 Sep 22 23:22:15.061 INFO eid:67 Found repair files: ["043", "043.db"]
36401 Sep 22 23:22:15.061 TRCE incoming request, uri: /newextent/67/data, method: GET, req_id: b8bfbe25-c2cb-4746-917f-dac2a72e966c, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36402 Sep 22 23:22:15.061 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/67/data, method: GET, req_id: b8bfbe25-c2cb-4746-917f-dac2a72e966c, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36403 Sep 22 23:22:15.067 TRCE incoming request, uri: /newextent/67/db, method: GET, req_id: 2849dfc1-62ce-4081-b43e-dd85371c722b, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36404 Sep 22 23:22:15.067 INFO request completed, latency_us: 389, response_code: 200, uri: /newextent/67/db, method: GET, req_id: 2849dfc1-62ce-4081-b43e-dd85371c722b, remote_addr: 127.0.0.1:33675, local_addr: 127.0.0.1:52864, task: repair
36405 Sep 22 23:22:15.068 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/043.copy" to "/tmp/downstairs-zrMnlo6G/00/000/043.replace"
36406 Sep 22 23:22:15.068 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36407 Sep 22 23:22:15.069 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/043.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36408 Sep 22 23:22:15.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/043"
36409 Sep 22 23:22:15.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/043.db"
36410 Sep 22 23:22:15.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36411 Sep 22 23:22:15.069 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/043.replace" to "/tmp/downstairs-zrMnlo6G/00/000/043.completed"
36412 Sep 22 23:22:15.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36413 Sep 22 23:22:15.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36414 Sep 22 23:22:15.069 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
36415 Sep 22 23:22:15.070 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } connected, version 4, task: proc
36416 Sep 22 23:22:15.070 DEBG [0] It's time to notify for 46
36417 Sep 22 23:22:15.070 INFO [0] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) Replaced Disconnected Disconnected ds_transition to WaitActive
36418 thread 'test::integration_test_volume_replace_downstairs_then_takeover' panicked at '[0] 9144e02c-c312-47c4-9b1c-f03618834608 Initializing Negotiation failed, Replaced -> WaitActive', upstairs/src/lib.rs:6349:21
36419 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
36420 Sep 22 23:22:15.070 INFO Completion from [0] id:46 status:true
36421 Sep 22 23:22:15.070 INFO [47/752] Repair commands completed
36422 Sep 22 23:22:15.070 INFO Pop front: ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }, state: ClientData([New, New, New]) }
36423 Sep 22 23:22:15.070 INFO Sent repair work, now wait for resp
36424 Sep 22 23:22:15.070 INFO [0] received reconcile message
36425 Sep 22 23:22:15.070 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }, state: ClientData([InProgress, New, New]) }, : downstairs
36426 Sep 22 23:22:15.070 INFO [0] client ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }
36427 Sep 22 23:22:15.070 INFO [1] received reconcile message
36428 Sep 22 23:22:15.071 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36429 Sep 22 23:22:15.071 INFO [1] client ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }
36430 Sep 22 23:22:15.071 INFO [2] received reconcile message
36431 Sep 22 23:22:15.071 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36432 Sep 22 23:22:15.071 INFO [2] client ExtentReopen { repair_id: ReconciliationId(47), extent_id: 67 }
36433 Sep 22 23:22:15.071 WARN UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } cannot grab lock, does not match UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: ec8dc28c-8140-4e2d-b538-f84f8abf147b, gen: 2 }!
36434 Sep 22 23:22:15.071 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } disconnected, Upstairs is not active, task: proc
36435 Sep 22 23:22:15.071 INFO connection (127.0.0.1:33363): all done
36436 Sep 22 23:22:15.071 DEBG 47 Reopen extent 67
36437 Sep 22 23:22:15.072 DEBG 47 Reopen extent 67
36438 Sep 22 23:22:15.072 DEBG 47 Reopen extent 67
36439 Sep 22 23:22:15.073 DEBG [2] It's time to notify for 47
36440 Sep 22 23:22:15.073 INFO Completion from [2] id:47 status:true
36441 Sep 22 23:22:15.073 INFO [48/752] Repair commands completed
36442 Sep 22 23:22:15.073 INFO Pop front: ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36443 Sep 22 23:22:15.073 INFO Sent repair work, now wait for resp
36444 Sep 22 23:22:15.073 INFO [0] received reconcile message
36445 Sep 22 23:22:15.073 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36446 Sep 22 23:22:15.073 INFO [0] client ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36447 Sep 22 23:22:15.073 INFO [1] received reconcile message
36448 Sep 22 23:22:15.073 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36449 Sep 22 23:22:15.073 INFO [1] client ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36450 Sep 22 23:22:15.073 INFO [2] received reconcile message
36451 Sep 22 23:22:15.073 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36452 Sep 22 23:22:15.073 INFO [2] client ExtentFlush { repair_id: ReconciliationId(48), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36453 Sep 22 23:22:15.073 DEBG 48 Flush extent 2 with f:2 g:2
36454 Sep 22 23:22:15.073 DEBG Flush just extent 2 with f:2 and g:2
36455 Sep 22 23:22:15.073 DEBG [1] It's time to notify for 48
36456 Sep 22 23:22:15.073 INFO Completion from [1] id:48 status:true
36457 Sep 22 23:22:15.073 INFO [49/752] Repair commands completed
36458 Sep 22 23:22:15.073 INFO Pop front: ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }, state: ClientData([New, New, New]) }
36459 Sep 22 23:22:15.073 INFO Sent repair work, now wait for resp
36460 Sep 22 23:22:15.073 INFO [0] received reconcile message
36461 Sep 22 23:22:15.073 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36462 Sep 22 23:22:15.073 INFO [0] client ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }
36463 Sep 22 23:22:15.073 INFO [1] received reconcile message
36464 Sep 22 23:22:15.073 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36465 Sep 22 23:22:15.074 INFO [1] client ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }
36466 Sep 22 23:22:15.074 INFO [2] received reconcile message
36467 Sep 22 23:22:15.074 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36468 Sep 22 23:22:15.074 INFO [2] client ExtentClose { repair_id: ReconciliationId(49), extent_id: 2 }
36469 Sep 22 23:22:15.074 DEBG 49 Close extent 2
36470 Sep 22 23:22:15.074 DEBG 49 Close extent 2
36471 Sep 22 23:22:15.074 DEBG 49 Close extent 2
36472 Sep 22 23:22:15.075 DEBG [2] It's time to notify for 49
36473 Sep 22 23:22:15.075 INFO Completion from [2] id:49 status:true
36474 Sep 22 23:22:15.075 INFO [50/752] Repair commands completed
36475 Sep 22 23:22:15.075 INFO Pop front: ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36476 Sep 22 23:22:15.075 INFO Sent repair work, now wait for resp
36477 Sep 22 23:22:15.075 INFO [0] received reconcile message
36478 Sep 22 23:22:15.075 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36479 Sep 22 23:22:15.075 INFO [0] client ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36480 Sep 22 23:22:15.075 INFO [0] Sending repair request ReconciliationId(50)
36481 Sep 22 23:22:15.075 INFO [1] received reconcile message
36482 Sep 22 23:22:15.075 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36483 Sep 22 23:22:15.075 INFO [1] client ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36484 Sep 22 23:22:15.075 INFO [1] No action required ReconciliationId(50)
36485 Sep 22 23:22:15.075 INFO [2] received reconcile message
36486 Sep 22 23:22:15.075 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36487 Sep 22 23:22:15.075 INFO [2] client ExtentRepair { repair_id: ReconciliationId(50), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36488 Sep 22 23:22:15.075 INFO [2] No action required ReconciliationId(50)
36489 Sep 22 23:22:15.075 DEBG 50 Repair extent 2 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36490 Sep 22 23:22:15.075 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/002.copy"
36491 Sep 22 23:22:15.117 DEBG [0] Read AckReady 1073, : downstairs
36492 Sep 22 23:22:15.118 DEBG up_ds_listen was notified
36493 Sep 22 23:22:15.118 DEBG up_ds_listen process 1073
36494 Sep 22 23:22:15.118 DEBG [A] ack job 1073:74, : downstairs
36495 Sep 22 23:22:15.137 INFO accepted connection, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36496 Sep 22 23:22:15.137 TRCE incoming request, uri: /extent/2/files, method: GET, req_id: ce42ef6e-a041-4c31-bc0c-64c8f3ce57b2, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36497 Sep 22 23:22:15.137 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 1
36498 Sep 22 23:22:15.138 INFO [1] Proc runs for 127.0.0.1:48339 in state Disconnected
36499 Sep 22 23:22:15.138 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 looper connected, looper: 2
36500 Sep 22 23:22:15.138 INFO [2] Proc runs for 127.0.0.1:33021 in state Disconnected
36501 Sep 22 23:22:15.138 INFO accepted connection from 127.0.0.1:42838, task: main
36502 Sep 22 23:22:15.138 INFO accepted connection from 127.0.0.1:44046, task: main
36503 Sep 22 23:22:15.138 INFO request completed, latency_us: 521, response_code: 200, uri: /extent/2/files, method: GET, req_id: ce42ef6e-a041-4c31-bc0c-64c8f3ce57b2, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36504 Sep 22 23:22:15.138 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
36505 Sep 22 23:22:15.138 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } connected, version 4, task: proc
36506 Sep 22 23:22:15.138 INFO Connection request from 9144e02c-c312-47c4-9b1c-f03618834608 with version 4, task: proc
36507 Sep 22 23:22:15.138 INFO upstairs UpstairsConnection { upstairs_id: 9144e02c-c312-47c4-9b1c-f03618834608, session_id: cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f, gen: 1 } connected, version 4, task: proc
36508 Sep 22 23:22:15.138 INFO eid:2 Found repair files: ["002", "002.db"]
36509 Sep 22 23:22:15.139 TRCE incoming request, uri: /newextent/2/data, method: GET, req_id: 3308ce35-403c-4e61-9152-eeec5a770056, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36510 Sep 22 23:22:15.139 INFO [1] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) Replaced Disconnected Disconnected ds_transition to WaitActive
36511 Sep 22 23:22:15.139 INFO [1] Transition from Disconnected to WaitActive
36512 Sep 22 23:22:15.139 INFO [2] 9144e02c-c312-47c4-9b1c-f03618834608 (cdb4ad0d-846c-40eb-b2e0-a9a03f8a5f6f) Replaced WaitActive Disconnected ds_transition to WaitActive
36513 Sep 22 23:22:15.139 INFO [2] Transition from Disconnected to WaitActive
36514 Sep 22 23:22:15.139 INFO request completed, latency_us: 481, response_code: 200, uri: /newextent/2/data, method: GET, req_id: 3308ce35-403c-4e61-9152-eeec5a770056, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36515 Sep 22 23:22:15.144 TRCE incoming request, uri: /newextent/2/db, method: GET, req_id: 0bac4694-f818-4f86-895d-2c5a3b34d737, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36516 Sep 22 23:22:15.144 INFO request completed, latency_us: 287, response_code: 200, uri: /newextent/2/db, method: GET, req_id: 0bac4694-f818-4f86-895d-2c5a3b34d737, remote_addr: 127.0.0.1:44318, local_addr: 127.0.0.1:52864, task: repair
36517 Sep 22 23:22:15.146 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/002.copy" to "/tmp/downstairs-zrMnlo6G/00/000/002.replace"
36518 Sep 22 23:22:15.146 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36519 Sep 22 23:22:15.146 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/002.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36520 Sep 22 23:22:15.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/002"
36521 Sep 22 23:22:15.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/002.db"
36522 Sep 22 23:22:15.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36523 Sep 22 23:22:15.147 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/002.replace" to "/tmp/downstairs-zrMnlo6G/00/000/002.completed"
36524 Sep 22 23:22:15.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36525 Sep 22 23:22:15.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36526 Sep 22 23:22:15.147 DEBG [0] It's time to notify for 50
36527 Sep 22 23:22:15.147 INFO Completion from [0] id:50 status:true
36528 Sep 22 23:22:15.147 INFO [51/752] Repair commands completed
36529 Sep 22 23:22:15.147 INFO Pop front: ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }, state: ClientData([New, New, New]) }
36530 Sep 22 23:22:15.147 INFO Sent repair work, now wait for resp
36531 Sep 22 23:22:15.147 INFO [0] received reconcile message
36532 Sep 22 23:22:15.147 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36533 Sep 22 23:22:15.147 INFO [0] client ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }
36534 Sep 22 23:22:15.147 INFO [1] received reconcile message
36535 Sep 22 23:22:15.147 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36536 Sep 22 23:22:15.147 INFO [1] client ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }
36537 Sep 22 23:22:15.147 INFO [2] received reconcile message
36538 Sep 22 23:22:15.147 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36539 Sep 22 23:22:15.147 INFO [2] client ExtentReopen { repair_id: ReconciliationId(51), extent_id: 2 }
36540 Sep 22 23:22:15.148 DEBG 51 Reopen extent 2
36541 Sep 22 23:22:15.148 DEBG 51 Reopen extent 2
36542 Sep 22 23:22:15.149 DEBG 51 Reopen extent 2
36543 Sep 22 23:22:15.149 DEBG [2] It's time to notify for 51
36544 Sep 22 23:22:15.149 INFO Completion from [2] id:51 status:true
36545 Sep 22 23:22:15.149 INFO [52/752] Repair commands completed
36546 Sep 22 23:22:15.149 INFO Pop front: ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36547 Sep 22 23:22:15.149 INFO Sent repair work, now wait for resp
36548 Sep 22 23:22:15.149 INFO [0] received reconcile message
36549 Sep 22 23:22:15.149 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36550 Sep 22 23:22:15.149 INFO [0] client ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36551 Sep 22 23:22:15.149 INFO [1] received reconcile message
36552 Sep 22 23:22:15.149 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36553 Sep 22 23:22:15.149 INFO [1] client ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36554 Sep 22 23:22:15.149 INFO [2] received reconcile message
36555 Sep 22 23:22:15.149 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36556 Sep 22 23:22:15.149 INFO [2] client ExtentFlush { repair_id: ReconciliationId(52), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36557 Sep 22 23:22:15.150 DEBG 52 Flush extent 25 with f:2 g:2
36558 Sep 22 23:22:15.150 DEBG Flush just extent 25 with f:2 and g:2
36559 Sep 22 23:22:15.150 DEBG [1] It's time to notify for 52
36560 Sep 22 23:22:15.150 INFO Completion from [1] id:52 status:true
36561 Sep 22 23:22:15.150 INFO [53/752] Repair commands completed
36562 Sep 22 23:22:15.150 INFO Pop front: ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }, state: ClientData([New, New, New]) }
36563 Sep 22 23:22:15.150 INFO Sent repair work, now wait for resp
36564 Sep 22 23:22:15.150 INFO [0] received reconcile message
36565 Sep 22 23:22:15.150 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }, state: ClientData([InProgress, New, New]) }, : downstairs
36566 Sep 22 23:22:15.150 INFO [0] client ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }
36567 Sep 22 23:22:15.150 INFO [1] received reconcile message
36568 Sep 22 23:22:15.150 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36569 Sep 22 23:22:15.150 INFO [1] client ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }
36570 Sep 22 23:22:15.150 INFO [2] received reconcile message
36571 Sep 22 23:22:15.150 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36572 Sep 22 23:22:15.150 INFO [2] client ExtentClose { repair_id: ReconciliationId(53), extent_id: 25 }
36573 Sep 22 23:22:15.150 DEBG 53 Close extent 25
36574 Sep 22 23:22:15.150 DEBG 53 Close extent 25
36575 Sep 22 23:22:15.151 DEBG 53 Close extent 25
36576 Sep 22 23:22:15.151 DEBG [2] It's time to notify for 53
36577 Sep 22 23:22:15.151 INFO Completion from [2] id:53 status:true
36578 Sep 22 23:22:15.151 INFO [54/752] Repair commands completed
36579 Sep 22 23:22:15.151 INFO Pop front: ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36580 Sep 22 23:22:15.151 INFO Sent repair work, now wait for resp
36581 Sep 22 23:22:15.151 INFO [0] received reconcile message
36582 Sep 22 23:22:15.151 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36583 Sep 22 23:22:15.151 INFO [0] client ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36584 Sep 22 23:22:15.151 INFO [0] Sending repair request ReconciliationId(54)
36585 Sep 22 23:22:15.151 INFO [1] received reconcile message
36586 Sep 22 23:22:15.151 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36587 Sep 22 23:22:15.151 INFO [1] client ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36588 Sep 22 23:22:15.151 INFO [1] No action required ReconciliationId(54)
36589 Sep 22 23:22:15.151 INFO [2] received reconcile message
36590 Sep 22 23:22:15.151 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36591 Sep 22 23:22:15.151 INFO [2] client ExtentRepair { repair_id: ReconciliationId(54), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36592 Sep 22 23:22:15.151 INFO [2] No action required ReconciliationId(54)
36593 Sep 22 23:22:15.152 DEBG 54 Repair extent 25 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36594 Sep 22 23:22:15.152 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/019.copy"
36595 Sep 22 23:22:15.171 DEBG up_ds_listen checked 1 jobs, back to waiting
36596 Sep 22 23:22:15.178 DEBG Read :1073 deps:[JobId(1072)] res:true
36597 Sep 22 23:22:15.199 DEBG Flush :1072 extent_limit None deps:[JobId(1071), JobId(1070)] res:true f:25 g:1
36598 Sep 22 23:22:15.199 INFO [lossy] skipping 1073
36599 Sep 22 23:22:15.199 INFO [lossy] skipping 1073
36600 Sep 22 23:22:15.206 DEBG Read :1073 deps:[JobId(1072)] res:true
36601 Sep 22 23:22:15.215 INFO accepted connection, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36602 Sep 22 23:22:15.215 TRCE incoming request, uri: /extent/25/files, method: GET, req_id: 9e179dd3-65c4-47fe-8168-ad8fdf6dde8a, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36603 Sep 22 23:22:15.215 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/25/files, method: GET, req_id: 9e179dd3-65c4-47fe-8168-ad8fdf6dde8a, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36604 Sep 22 23:22:15.215 INFO eid:25 Found repair files: ["019", "019.db"]
36605 Sep 22 23:22:15.216 TRCE incoming request, uri: /newextent/25/data, method: GET, req_id: 00fefafb-175a-442e-9964-f817695700bb, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36606 Sep 22 23:22:15.216 INFO request completed, latency_us: 320, response_code: 200, uri: /newextent/25/data, method: GET, req_id: 00fefafb-175a-442e-9964-f817695700bb, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36607 Sep 22 23:22:15.221 TRCE incoming request, uri: /newextent/25/db, method: GET, req_id: f35c4604-c36f-4059-8ac9-848a04cc458a, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36608 Sep 22 23:22:15.221 INFO request completed, latency_us: 289, response_code: 200, uri: /newextent/25/db, method: GET, req_id: f35c4604-c36f-4059-8ac9-848a04cc458a, remote_addr: 127.0.0.1:61012, local_addr: 127.0.0.1:52864, task: repair
36609 Sep 22 23:22:15.222 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/019.copy" to "/tmp/downstairs-zrMnlo6G/00/000/019.replace"
36610 Sep 22 23:22:15.222 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36611 Sep 22 23:22:15.223 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/019.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36612 Sep 22 23:22:15.223 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/019"
36613 Sep 22 23:22:15.224 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/019.db"
36614 Sep 22 23:22:15.224 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36615 Sep 22 23:22:15.224 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/019.replace" to "/tmp/downstairs-zrMnlo6G/00/000/019.completed"
36616 Sep 22 23:22:15.224 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36617 Sep 22 23:22:15.224 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36618 Sep 22 23:22:15.224 DEBG [0] It's time to notify for 54
36619 Sep 22 23:22:15.224 INFO Completion from [0] id:54 status:true
36620 Sep 22 23:22:15.224 INFO [55/752] Repair commands completed
36621 Sep 22 23:22:15.224 INFO Pop front: ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }, state: ClientData([New, New, New]) }
36622 Sep 22 23:22:15.224 INFO Sent repair work, now wait for resp
36623 Sep 22 23:22:15.224 INFO [0] received reconcile message
36624 Sep 22 23:22:15.224 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }, state: ClientData([InProgress, New, New]) }, : downstairs
36625 Sep 22 23:22:15.224 INFO [0] client ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }
36626 Sep 22 23:22:15.224 INFO [1] received reconcile message
36627 Sep 22 23:22:15.224 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36628 Sep 22 23:22:15.224 INFO [1] client ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }
36629 Sep 22 23:22:15.224 INFO [2] received reconcile message
36630 Sep 22 23:22:15.224 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36631 Sep 22 23:22:15.224 INFO [2] client ExtentReopen { repair_id: ReconciliationId(55), extent_id: 25 }
36632 Sep 22 23:22:15.224 DEBG 55 Reopen extent 25
36633 Sep 22 23:22:15.225 DEBG 55 Reopen extent 25
36634 Sep 22 23:22:15.226 DEBG 55 Reopen extent 25
36635 Sep 22 23:22:15.226 DEBG [2] It's time to notify for 55
36636 Sep 22 23:22:15.226 INFO Completion from [2] id:55 status:true
36637 Sep 22 23:22:15.226 INFO [56/752] Repair commands completed
36638 Sep 22 23:22:15.226 INFO Pop front: ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36639 Sep 22 23:22:15.226 INFO Sent repair work, now wait for resp
36640 Sep 22 23:22:15.226 INFO [0] received reconcile message
36641 Sep 22 23:22:15.226 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36642 Sep 22 23:22:15.226 INFO [0] client ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36643 Sep 22 23:22:15.226 INFO [1] received reconcile message
36644 Sep 22 23:22:15.226 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36645 Sep 22 23:22:15.226 INFO [1] client ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36646 Sep 22 23:22:15.227 INFO [2] received reconcile message
36647 Sep 22 23:22:15.227 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36648 Sep 22 23:22:15.227 INFO [2] client ExtentFlush { repair_id: ReconciliationId(56), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36649 Sep 22 23:22:15.227 DEBG 56 Flush extent 152 with f:2 g:2
36650 Sep 22 23:22:15.227 DEBG Flush just extent 152 with f:2 and g:2
36651 Sep 22 23:22:15.227 DEBG [1] It's time to notify for 56
36652 Sep 22 23:22:15.227 INFO Completion from [1] id:56 status:true
36653 Sep 22 23:22:15.227 INFO [57/752] Repair commands completed
36654 Sep 22 23:22:15.227 INFO Pop front: ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }, state: ClientData([New, New, New]) }
36655 Sep 22 23:22:15.227 INFO Sent repair work, now wait for resp
36656 Sep 22 23:22:15.227 INFO [0] received reconcile message
36657 Sep 22 23:22:15.227 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }, state: ClientData([InProgress, New, New]) }, : downstairs
36658 Sep 22 23:22:15.227 INFO [0] client ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }
36659 Sep 22 23:22:15.227 INFO [1] received reconcile message
36660 Sep 22 23:22:15.227 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36661 Sep 22 23:22:15.227 INFO [1] client ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }
36662 Sep 22 23:22:15.227 INFO [2] received reconcile message
36663 Sep 22 23:22:15.227 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36664 Sep 22 23:22:15.227 INFO [2] client ExtentClose { repair_id: ReconciliationId(57), extent_id: 152 }
36665 Sep 22 23:22:15.227 DEBG 57 Close extent 152
36666 Sep 22 23:22:15.227 DEBG IO Read 1075 has deps [JobId(1074)]
36667 Sep 22 23:22:15.227 DEBG IO Flush 1076 has deps [JobId(1075), JobId(1074)]
36668 Sep 22 23:22:15.227 DEBG 57 Close extent 152
36669 Sep 22 23:22:15.228 DEBG [rc] retire 1072 clears [JobId(1071), JobId(1072)], : downstairs
36670 Sep 22 23:22:15.228 DEBG 57 Close extent 152
36671 Sep 22 23:22:15.228 DEBG [2] It's time to notify for 57
36672 Sep 22 23:22:15.228 INFO Completion from [2] id:57 status:true
36673 Sep 22 23:22:15.228 INFO [58/752] Repair commands completed
36674 Sep 22 23:22:15.228 INFO Pop front: ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36675 Sep 22 23:22:15.228 INFO Sent repair work, now wait for resp
36676 Sep 22 23:22:15.228 INFO [0] received reconcile message
36677 Sep 22 23:22:15.228 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36678 Sep 22 23:22:15.228 INFO [0] client ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36679 Sep 22 23:22:15.228 INFO [0] Sending repair request ReconciliationId(58)
36680 Sep 22 23:22:15.228 INFO [1] received reconcile message
36681 Sep 22 23:22:15.228 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36682 Sep 22 23:22:15.228 INFO [1] client ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36683 Sep 22 23:22:15.228 INFO [1] No action required ReconciliationId(58)
36684 Sep 22 23:22:15.229 INFO [2] received reconcile message
36685 Sep 22 23:22:15.229 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36686 Sep 22 23:22:15.229 INFO [2] client ExtentRepair { repair_id: ReconciliationId(58), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36687 Sep 22 23:22:15.229 INFO [2] No action required ReconciliationId(58)
36688 Sep 22 23:22:15.229 DEBG 58 Repair extent 152 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36689 Sep 22 23:22:15.229 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/098.copy"
36690 Sep 22 23:22:15.242 WARN returning error on flush!
36691 Sep 22 23:22:15.242 DEBG Flush :1074 extent_limit None deps:[JobId(1073), JobId(1072)] res:false f:26 g:1
36692 Sep 22 23:22:15.242 INFO [lossy] skipping 1074
36693 Sep 22 23:22:15.242 DEBG Flush :1074 extent_limit None deps:[JobId(1073), JobId(1072)] res:true f:26 g:1
36694 Sep 22 23:22:15.242 INFO [lossy] skipping 1075
36695 Sep 22 23:22:15.248 DEBG Read :1075 deps:[JobId(1074)] res:true
36696 Sep 22 23:22:15.270 INFO [lossy] sleeping 1 second
36697 Sep 22 23:22:15.290 INFO accepted connection, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36698 Sep 22 23:22:15.290 TRCE incoming request, uri: /extent/152/files, method: GET, req_id: e23c5b15-f803-483f-a1fb-369149642967, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36699 Sep 22 23:22:15.291 INFO request completed, latency_us: 271, response_code: 200, uri: /extent/152/files, method: GET, req_id: e23c5b15-f803-483f-a1fb-369149642967, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36700 Sep 22 23:22:15.291 INFO eid:152 Found repair files: ["098", "098.db"]
36701 Sep 22 23:22:15.291 TRCE incoming request, uri: /newextent/152/data, method: GET, req_id: 3812f098-8d70-4b13-8b92-a01bf068aa03, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36702 Sep 22 23:22:15.292 INFO request completed, latency_us: 359, response_code: 200, uri: /newextent/152/data, method: GET, req_id: 3812f098-8d70-4b13-8b92-a01bf068aa03, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36703 Sep 22 23:22:15.297 TRCE incoming request, uri: /newextent/152/db, method: GET, req_id: 09656b5a-b661-4cc9-8a44-78a034cb4528, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36704 Sep 22 23:22:15.297 INFO request completed, latency_us: 287, response_code: 200, uri: /newextent/152/db, method: GET, req_id: 09656b5a-b661-4cc9-8a44-78a034cb4528, remote_addr: 127.0.0.1:36859, local_addr: 127.0.0.1:52864, task: repair
36705 Sep 22 23:22:15.298 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/098.copy" to "/tmp/downstairs-zrMnlo6G/00/000/098.replace"
36706 Sep 22 23:22:15.298 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36707 Sep 22 23:22:15.299 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/098.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36708 Sep 22 23:22:15.299 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/098"
36709 Sep 22 23:22:15.300 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/098.db"
36710 Sep 22 23:22:15.300 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36711 Sep 22 23:22:15.300 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/098.replace" to "/tmp/downstairs-zrMnlo6G/00/000/098.completed"
36712 Sep 22 23:22:15.300 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36713 Sep 22 23:22:15.300 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36714 Sep 22 23:22:15.300 DEBG [0] It's time to notify for 58
36715 Sep 22 23:22:15.300 INFO Completion from [0] id:58 status:true
36716 Sep 22 23:22:15.300 INFO [59/752] Repair commands completed
36717 Sep 22 23:22:15.300 INFO Pop front: ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }, state: ClientData([New, New, New]) }
36718 Sep 22 23:22:15.300 INFO Sent repair work, now wait for resp
36719 Sep 22 23:22:15.300 INFO [0] received reconcile message
36720 Sep 22 23:22:15.300 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }, state: ClientData([InProgress, New, New]) }, : downstairs
36721 Sep 22 23:22:15.300 INFO [0] client ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }
36722 Sep 22 23:22:15.300 INFO [1] received reconcile message
36723 Sep 22 23:22:15.300 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36724 Sep 22 23:22:15.300 INFO [1] client ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }
36725 Sep 22 23:22:15.300 INFO [2] received reconcile message
36726 Sep 22 23:22:15.300 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36727 Sep 22 23:22:15.300 INFO [2] client ExtentReopen { repair_id: ReconciliationId(59), extent_id: 152 }
36728 Sep 22 23:22:15.301 DEBG 59 Reopen extent 152
36729 Sep 22 23:22:15.301 DEBG 59 Reopen extent 152
36730 Sep 22 23:22:15.302 DEBG 59 Reopen extent 152
36731 Sep 22 23:22:15.302 DEBG [2] It's time to notify for 59
36732 Sep 22 23:22:15.302 INFO Completion from [2] id:59 status:true
36733 Sep 22 23:22:15.302 INFO [60/752] Repair commands completed
36734 Sep 22 23:22:15.302 INFO Pop front: ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36735 Sep 22 23:22:15.303 INFO Sent repair work, now wait for resp
36736 Sep 22 23:22:15.303 INFO [0] received reconcile message
36737 Sep 22 23:22:15.303 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36738 Sep 22 23:22:15.303 INFO [0] client ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36739 Sep 22 23:22:15.303 INFO [1] received reconcile message
36740 Sep 22 23:22:15.303 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36741 Sep 22 23:22:15.303 INFO [1] client ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36742 Sep 22 23:22:15.303 INFO [2] received reconcile message
36743 Sep 22 23:22:15.303 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36744 Sep 22 23:22:15.303 INFO [2] client ExtentFlush { repair_id: ReconciliationId(60), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36745 Sep 22 23:22:15.303 DEBG 60 Flush extent 153 with f:2 g:2
36746 Sep 22 23:22:15.303 DEBG Flush just extent 153 with f:2 and g:2
36747 Sep 22 23:22:15.303 DEBG [1] It's time to notify for 60
36748 Sep 22 23:22:15.303 INFO Completion from [1] id:60 status:true
36749 Sep 22 23:22:15.303 INFO [61/752] Repair commands completed
36750 Sep 22 23:22:15.303 INFO Pop front: ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }, state: ClientData([New, New, New]) }
36751 Sep 22 23:22:15.303 INFO Sent repair work, now wait for resp
36752 Sep 22 23:22:15.303 INFO [0] received reconcile message
36753 Sep 22 23:22:15.303 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }, state: ClientData([InProgress, New, New]) }, : downstairs
36754 Sep 22 23:22:15.303 INFO [0] client ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }
36755 Sep 22 23:22:15.303 INFO [1] received reconcile message
36756 Sep 22 23:22:15.303 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36757 Sep 22 23:22:15.303 INFO [1] client ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }
36758 Sep 22 23:22:15.303 INFO [2] received reconcile message
36759 Sep 22 23:22:15.303 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36760 Sep 22 23:22:15.303 INFO [2] client ExtentClose { repair_id: ReconciliationId(61), extent_id: 153 }
36761 Sep 22 23:22:15.303 DEBG 61 Close extent 153
36762 Sep 22 23:22:15.304 DEBG 61 Close extent 153
36763 Sep 22 23:22:15.304 DEBG 61 Close extent 153
36764 Sep 22 23:22:15.304 DEBG [2] It's time to notify for 61
36765 Sep 22 23:22:15.304 INFO Completion from [2] id:61 status:true
36766 Sep 22 23:22:15.304 INFO [62/752] Repair commands completed
36767 Sep 22 23:22:15.304 INFO Pop front: ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36768 Sep 22 23:22:15.304 INFO Sent repair work, now wait for resp
36769 Sep 22 23:22:15.305 INFO [0] received reconcile message
36770 Sep 22 23:22:15.305 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36771 Sep 22 23:22:15.305 INFO [0] client ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36772 Sep 22 23:22:15.305 INFO [0] Sending repair request ReconciliationId(62)
36773 Sep 22 23:22:15.305 INFO [1] received reconcile message
36774 Sep 22 23:22:15.305 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36775 Sep 22 23:22:15.305 INFO [1] client ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36776 Sep 22 23:22:15.305 INFO [1] No action required ReconciliationId(62)
36777 Sep 22 23:22:15.305 INFO [2] received reconcile message
36778 Sep 22 23:22:15.305 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36779 Sep 22 23:22:15.305 INFO [2] client ExtentRepair { repair_id: ReconciliationId(62), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36780 Sep 22 23:22:15.305 INFO [2] No action required ReconciliationId(62)
36781 Sep 22 23:22:15.305 DEBG 62 Repair extent 153 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36782 Sep 22 23:22:15.305 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/099.copy"
36783 Sep 22 23:22:15.367 INFO accepted connection, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36784 Sep 22 23:22:15.368 TRCE incoming request, uri: /extent/153/files, method: GET, req_id: a2534261-e377-44aa-8f56-bd13b383c871, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36785 Sep 22 23:22:15.368 INFO request completed, latency_us: 258, response_code: 200, uri: /extent/153/files, method: GET, req_id: a2534261-e377-44aa-8f56-bd13b383c871, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36786 Sep 22 23:22:15.368 INFO eid:153 Found repair files: ["099", "099.db"]
36787 Sep 22 23:22:15.369 TRCE incoming request, uri: /newextent/153/data, method: GET, req_id: 351ef690-c10c-41d9-9401-c9e825acf2a5, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36788 Sep 22 23:22:15.369 INFO request completed, latency_us: 355, response_code: 200, uri: /newextent/153/data, method: GET, req_id: 351ef690-c10c-41d9-9401-c9e825acf2a5, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36789 Sep 22 23:22:15.374 TRCE incoming request, uri: /newextent/153/db, method: GET, req_id: 3c84bb53-b1c7-4667-9499-442227b1160b, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36790 Sep 22 23:22:15.374 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/153/db, method: GET, req_id: 3c84bb53-b1c7-4667-9499-442227b1160b, remote_addr: 127.0.0.1:42918, local_addr: 127.0.0.1:52864, task: repair
36791 Sep 22 23:22:15.376 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/099.copy" to "/tmp/downstairs-zrMnlo6G/00/000/099.replace"
36792 Sep 22 23:22:15.376 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36793 Sep 22 23:22:15.377 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/099.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36794 Sep 22 23:22:15.377 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/099"
36795 Sep 22 23:22:15.377 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/099.db"
36796 Sep 22 23:22:15.377 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36797 Sep 22 23:22:15.377 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/099.replace" to "/tmp/downstairs-zrMnlo6G/00/000/099.completed"
36798 Sep 22 23:22:15.377 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36799 Sep 22 23:22:15.377 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36800 Sep 22 23:22:15.378 DEBG [0] It's time to notify for 62
36801 Sep 22 23:22:15.378 INFO Completion from [0] id:62 status:true
36802 Sep 22 23:22:15.378 INFO [63/752] Repair commands completed
36803 Sep 22 23:22:15.378 INFO Pop front: ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }, state: ClientData([New, New, New]) }
36804 Sep 22 23:22:15.378 INFO Sent repair work, now wait for resp
36805 Sep 22 23:22:15.378 INFO [0] received reconcile message
36806 Sep 22 23:22:15.378 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }, state: ClientData([InProgress, New, New]) }, : downstairs
36807 Sep 22 23:22:15.378 INFO [0] client ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }
36808 Sep 22 23:22:15.378 INFO [1] received reconcile message
36809 Sep 22 23:22:15.378 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36810 Sep 22 23:22:15.378 INFO [1] client ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }
36811 Sep 22 23:22:15.378 INFO [2] received reconcile message
36812 Sep 22 23:22:15.378 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36813 Sep 22 23:22:15.378 INFO [2] client ExtentReopen { repair_id: ReconciliationId(63), extent_id: 153 }
36814 Sep 22 23:22:15.378 DEBG 63 Reopen extent 153
36815 Sep 22 23:22:15.379 DEBG 63 Reopen extent 153
36816 Sep 22 23:22:15.379 DEBG 63 Reopen extent 153
36817 Sep 22 23:22:15.380 DEBG [2] It's time to notify for 63
36818 Sep 22 23:22:15.380 INFO Completion from [2] id:63 status:true
36819 Sep 22 23:22:15.380 INFO [64/752] Repair commands completed
36820 Sep 22 23:22:15.380 INFO Pop front: ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36821 Sep 22 23:22:15.380 INFO Sent repair work, now wait for resp
36822 Sep 22 23:22:15.380 INFO [0] received reconcile message
36823 Sep 22 23:22:15.380 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36824 Sep 22 23:22:15.380 INFO [0] client ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36825 Sep 22 23:22:15.380 INFO [1] received reconcile message
36826 Sep 22 23:22:15.380 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36827 Sep 22 23:22:15.380 INFO [1] client ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36828 Sep 22 23:22:15.380 INFO [2] received reconcile message
36829 Sep 22 23:22:15.380 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36830 Sep 22 23:22:15.380 INFO [2] client ExtentFlush { repair_id: ReconciliationId(64), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36831 Sep 22 23:22:15.381 DEBG 64 Flush extent 156 with f:2 g:2
36832 Sep 22 23:22:15.381 DEBG Flush just extent 156 with f:2 and g:2
36833 Sep 22 23:22:15.381 DEBG [1] It's time to notify for 64
36834 Sep 22 23:22:15.381 INFO Completion from [1] id:64 status:true
36835 Sep 22 23:22:15.381 INFO [65/752] Repair commands completed
36836 Sep 22 23:22:15.381 INFO Pop front: ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }, state: ClientData([New, New, New]) }
36837 Sep 22 23:22:15.381 INFO Sent repair work, now wait for resp
36838 Sep 22 23:22:15.381 INFO [0] received reconcile message
36839 Sep 22 23:22:15.381 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }, state: ClientData([InProgress, New, New]) }, : downstairs
36840 Sep 22 23:22:15.381 INFO [0] client ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }
36841 Sep 22 23:22:15.381 INFO [1] received reconcile message
36842 Sep 22 23:22:15.381 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36843 Sep 22 23:22:15.381 INFO [1] client ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }
36844 Sep 22 23:22:15.381 INFO [2] received reconcile message
36845 Sep 22 23:22:15.381 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36846 Sep 22 23:22:15.381 INFO [2] client ExtentClose { repair_id: ReconciliationId(65), extent_id: 156 }
36847 Sep 22 23:22:15.381 DEBG 65 Close extent 156
36848 Sep 22 23:22:15.381 DEBG 65 Close extent 156
36849 Sep 22 23:22:15.382 DEBG 65 Close extent 156
36850 Sep 22 23:22:15.382 DEBG [2] It's time to notify for 65
36851 Sep 22 23:22:15.382 INFO Completion from [2] id:65 status:true
36852 Sep 22 23:22:15.382 INFO [66/752] Repair commands completed
36853 Sep 22 23:22:15.382 INFO Pop front: ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36854 Sep 22 23:22:15.382 INFO Sent repair work, now wait for resp
36855 Sep 22 23:22:15.382 INFO [0] received reconcile message
36856 Sep 22 23:22:15.382 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36857 Sep 22 23:22:15.382 INFO [0] client ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36858 Sep 22 23:22:15.382 INFO [0] Sending repair request ReconciliationId(66)
36859 Sep 22 23:22:15.382 INFO [1] received reconcile message
36860 Sep 22 23:22:15.382 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36861 Sep 22 23:22:15.383 INFO [1] client ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36862 Sep 22 23:22:15.383 INFO [1] No action required ReconciliationId(66)
36863 Sep 22 23:22:15.383 INFO [2] received reconcile message
36864 Sep 22 23:22:15.383 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36865 Sep 22 23:22:15.383 INFO [2] client ExtentRepair { repair_id: ReconciliationId(66), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36866 Sep 22 23:22:15.383 INFO [2] No action required ReconciliationId(66)
36867 Sep 22 23:22:15.383 DEBG 66 Repair extent 156 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36868 Sep 22 23:22:15.383 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/09C.copy"
36869 Sep 22 23:22:15.444 INFO accepted connection, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36870 Sep 22 23:22:15.445 TRCE incoming request, uri: /extent/156/files, method: GET, req_id: 46731d56-3f76-4508-bc3c-d5f9d0869ff2, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36871 Sep 22 23:22:15.445 INFO request completed, latency_us: 224, response_code: 200, uri: /extent/156/files, method: GET, req_id: 46731d56-3f76-4508-bc3c-d5f9d0869ff2, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36872 Sep 22 23:22:15.445 INFO eid:156 Found repair files: ["09C", "09C.db"]
36873 Sep 22 23:22:15.445 TRCE incoming request, uri: /newextent/156/data, method: GET, req_id: 1449cc2f-b18c-4016-9559-6085307b4d8c, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36874 Sep 22 23:22:15.446 INFO request completed, latency_us: 327, response_code: 200, uri: /newextent/156/data, method: GET, req_id: 1449cc2f-b18c-4016-9559-6085307b4d8c, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36875 Sep 22 23:22:15.450 TRCE incoming request, uri: /newextent/156/db, method: GET, req_id: 8e681b2a-a7b9-4e8f-9583-8241ccec89ec, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36876 Sep 22 23:22:15.451 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/156/db, method: GET, req_id: 8e681b2a-a7b9-4e8f-9583-8241ccec89ec, remote_addr: 127.0.0.1:59169, local_addr: 127.0.0.1:52864, task: repair
36877 Sep 22 23:22:15.452 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/09C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/09C.replace"
36878 Sep 22 23:22:15.452 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36879 Sep 22 23:22:15.453 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/09C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36880 Sep 22 23:22:15.453 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09C"
36881 Sep 22 23:22:15.453 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09C.db"
36882 Sep 22 23:22:15.453 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36883 Sep 22 23:22:15.453 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/09C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/09C.completed"
36884 Sep 22 23:22:15.453 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36885 Sep 22 23:22:15.453 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36886 Sep 22 23:22:15.453 DEBG [0] It's time to notify for 66
36887 Sep 22 23:22:15.454 INFO Completion from [0] id:66 status:true
36888 Sep 22 23:22:15.454 INFO [67/752] Repair commands completed
36889 Sep 22 23:22:15.454 INFO Pop front: ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }, state: ClientData([New, New, New]) }
36890 Sep 22 23:22:15.454 INFO Sent repair work, now wait for resp
36891 Sep 22 23:22:15.454 INFO [0] received reconcile message
36892 Sep 22 23:22:15.454 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }, state: ClientData([InProgress, New, New]) }, : downstairs
36893 Sep 22 23:22:15.454 INFO [0] client ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }
36894 Sep 22 23:22:15.454 INFO [1] received reconcile message
36895 Sep 22 23:22:15.454 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36896 Sep 22 23:22:15.454 INFO [1] client ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }
36897 Sep 22 23:22:15.454 INFO [2] received reconcile message
36898 Sep 22 23:22:15.454 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36899 Sep 22 23:22:15.454 INFO [2] client ExtentReopen { repair_id: ReconciliationId(67), extent_id: 156 }
36900 Sep 22 23:22:15.454 DEBG 67 Reopen extent 156
36901 Sep 22 23:22:15.455 DEBG 67 Reopen extent 156
36902 Sep 22 23:22:15.455 DEBG 67 Reopen extent 156
36903 Sep 22 23:22:15.456 DEBG [2] It's time to notify for 67
36904 Sep 22 23:22:15.456 INFO Completion from [2] id:67 status:true
36905 Sep 22 23:22:15.456 INFO [68/752] Repair commands completed
36906 Sep 22 23:22:15.456 INFO Pop front: ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36907 Sep 22 23:22:15.456 INFO Sent repair work, now wait for resp
36908 Sep 22 23:22:15.456 INFO [0] received reconcile message
36909 Sep 22 23:22:15.456 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36910 Sep 22 23:22:15.456 INFO [0] client ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36911 Sep 22 23:22:15.456 INFO [1] received reconcile message
36912 Sep 22 23:22:15.456 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36913 Sep 22 23:22:15.456 INFO [1] client ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36914 Sep 22 23:22:15.456 INFO [2] received reconcile message
36915 Sep 22 23:22:15.456 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36916 Sep 22 23:22:15.456 INFO [2] client ExtentFlush { repair_id: ReconciliationId(68), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36917 Sep 22 23:22:15.456 DEBG 68 Flush extent 125 with f:2 g:2
36918 Sep 22 23:22:15.456 DEBG Flush just extent 125 with f:2 and g:2
36919 Sep 22 23:22:15.456 DEBG [1] It's time to notify for 68
36920 Sep 22 23:22:15.456 INFO Completion from [1] id:68 status:true
36921 Sep 22 23:22:15.457 INFO [69/752] Repair commands completed
36922 Sep 22 23:22:15.457 INFO Pop front: ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }, state: ClientData([New, New, New]) }
36923 Sep 22 23:22:15.457 INFO Sent repair work, now wait for resp
36924 Sep 22 23:22:15.457 INFO [0] received reconcile message
36925 Sep 22 23:22:15.457 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }, state: ClientData([InProgress, New, New]) }, : downstairs
36926 Sep 22 23:22:15.457 INFO [0] client ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }
36927 Sep 22 23:22:15.457 INFO [1] received reconcile message
36928 Sep 22 23:22:15.457 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36929 Sep 22 23:22:15.457 INFO [1] client ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }
36930 Sep 22 23:22:15.457 INFO [2] received reconcile message
36931 Sep 22 23:22:15.457 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36932 Sep 22 23:22:15.457 INFO [2] client ExtentClose { repair_id: ReconciliationId(69), extent_id: 125 }
36933 Sep 22 23:22:15.457 DEBG 69 Close extent 125
36934 Sep 22 23:22:15.457 DEBG 69 Close extent 125
36935 Sep 22 23:22:15.457 DEBG 69 Close extent 125
36936 Sep 22 23:22:15.458 DEBG [2] It's time to notify for 69
36937 Sep 22 23:22:15.458 INFO Completion from [2] id:69 status:true
36938 Sep 22 23:22:15.458 INFO [70/752] Repair commands completed
36939 Sep 22 23:22:15.458 INFO Pop front: ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36940 Sep 22 23:22:15.458 INFO Sent repair work, now wait for resp
36941 Sep 22 23:22:15.458 INFO [0] received reconcile message
36942 Sep 22 23:22:15.458 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36943 Sep 22 23:22:15.458 INFO [0] client ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36944 Sep 22 23:22:15.458 INFO [0] Sending repair request ReconciliationId(70)
36945 Sep 22 23:22:15.458 INFO [1] received reconcile message
36946 Sep 22 23:22:15.458 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36947 Sep 22 23:22:15.458 INFO [1] client ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36948 Sep 22 23:22:15.458 INFO [1] No action required ReconciliationId(70)
36949 Sep 22 23:22:15.458 INFO [2] received reconcile message
36950 Sep 22 23:22:15.458 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36951 Sep 22 23:22:15.458 INFO [2] client ExtentRepair { repair_id: ReconciliationId(70), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
36952 Sep 22 23:22:15.458 INFO [2] No action required ReconciliationId(70)
36953 Sep 22 23:22:15.458 DEBG 70 Repair extent 125 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
36954 Sep 22 23:22:15.458 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/07D.copy"
36955 Sep 22 23:22:15.524 INFO accepted connection, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36956 Sep 22 23:22:15.524 TRCE incoming request, uri: /extent/125/files, method: GET, req_id: 3df48713-5d1e-49f8-bf89-93e0af94c2a9, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36957 Sep 22 23:22:15.525 INFO request completed, latency_us: 214, response_code: 200, uri: /extent/125/files, method: GET, req_id: 3df48713-5d1e-49f8-bf89-93e0af94c2a9, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36958 Sep 22 23:22:15.525 INFO eid:125 Found repair files: ["07D", "07D.db"]
36959 Sep 22 23:22:15.525 TRCE incoming request, uri: /newextent/125/data, method: GET, req_id: 0317ecf8-f154-4af2-b370-ab8b5b565f14, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36960 Sep 22 23:22:15.525 INFO request completed, latency_us: 331, response_code: 200, uri: /newextent/125/data, method: GET, req_id: 0317ecf8-f154-4af2-b370-ab8b5b565f14, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36961 Sep 22 23:22:15.530 TRCE incoming request, uri: /newextent/125/db, method: GET, req_id: 3f7382de-7458-4d96-80ed-e88314d8e5c4, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36962 Sep 22 23:22:15.530 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/125/db, method: GET, req_id: 3f7382de-7458-4d96-80ed-e88314d8e5c4, remote_addr: 127.0.0.1:62531, local_addr: 127.0.0.1:52864, task: repair
36963 Sep 22 23:22:15.531 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/07D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/07D.replace"
36964 Sep 22 23:22:15.531 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36965 Sep 22 23:22:15.532 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/07D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
36966 Sep 22 23:22:15.533 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07D"
36967 Sep 22 23:22:15.533 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07D.db"
36968 Sep 22 23:22:15.533 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36969 Sep 22 23:22:15.533 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/07D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/07D.completed"
36970 Sep 22 23:22:15.533 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36971 Sep 22 23:22:15.533 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
36972 Sep 22 23:22:15.533 DEBG [0] It's time to notify for 70
36973 Sep 22 23:22:15.533 INFO Completion from [0] id:70 status:true
36974 Sep 22 23:22:15.533 INFO [71/752] Repair commands completed
36975 Sep 22 23:22:15.533 INFO Pop front: ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }, state: ClientData([New, New, New]) }
36976 Sep 22 23:22:15.533 INFO Sent repair work, now wait for resp
36977 Sep 22 23:22:15.533 INFO [0] received reconcile message
36978 Sep 22 23:22:15.533 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }, state: ClientData([InProgress, New, New]) }, : downstairs
36979 Sep 22 23:22:15.533 INFO [0] client ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }
36980 Sep 22 23:22:15.533 INFO [1] received reconcile message
36981 Sep 22 23:22:15.533 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36982 Sep 22 23:22:15.533 INFO [1] client ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }
36983 Sep 22 23:22:15.533 INFO [2] received reconcile message
36984 Sep 22 23:22:15.533 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36985 Sep 22 23:22:15.533 INFO [2] client ExtentReopen { repair_id: ReconciliationId(71), extent_id: 125 }
36986 Sep 22 23:22:15.534 DEBG 71 Reopen extent 125
36987 Sep 22 23:22:15.534 DEBG 71 Reopen extent 125
36988 Sep 22 23:22:15.535 DEBG 71 Reopen extent 125
36989 Sep 22 23:22:15.535 DEBG [2] It's time to notify for 71
36990 Sep 22 23:22:15.535 INFO Completion from [2] id:71 status:true
36991 Sep 22 23:22:15.535 INFO [72/752] Repair commands completed
36992 Sep 22 23:22:15.535 INFO Pop front: ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36993 Sep 22 23:22:15.535 INFO Sent repair work, now wait for resp
36994 Sep 22 23:22:15.535 INFO [0] received reconcile message
36995 Sep 22 23:22:15.535 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36996 Sep 22 23:22:15.535 INFO [0] client ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36997 Sep 22 23:22:15.535 INFO [1] received reconcile message
36998 Sep 22 23:22:15.535 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36999 Sep 22 23:22:15.535 INFO [1] client ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37000 Sep 22 23:22:15.535 INFO [2] received reconcile message
37001 Sep 22 23:22:15.536 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37002 Sep 22 23:22:15.536 INFO [2] client ExtentFlush { repair_id: ReconciliationId(72), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37003 Sep 22 23:22:15.536 DEBG 72 Flush extent 37 with f:2 g:2
37004 Sep 22 23:22:15.536 DEBG Flush just extent 37 with f:2 and g:2
37005 Sep 22 23:22:15.536 DEBG [1] It's time to notify for 72
37006 Sep 22 23:22:15.536 INFO Completion from [1] id:72 status:true
37007 Sep 22 23:22:15.536 INFO [73/752] Repair commands completed
37008 Sep 22 23:22:15.536 INFO Pop front: ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }, state: ClientData([New, New, New]) }
37009 Sep 22 23:22:15.536 INFO Sent repair work, now wait for resp
37010 Sep 22 23:22:15.536 INFO [0] received reconcile message
37011 Sep 22 23:22:15.536 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }, state: ClientData([InProgress, New, New]) }, : downstairs
37012 Sep 22 23:22:15.536 INFO [0] client ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }
37013 Sep 22 23:22:15.536 INFO [1] received reconcile message
37014 Sep 22 23:22:15.536 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37015 Sep 22 23:22:15.536 INFO [1] client ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }
37016 Sep 22 23:22:15.536 INFO [2] received reconcile message
37017 Sep 22 23:22:15.536 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37018 Sep 22 23:22:15.536 INFO [2] client ExtentClose { repair_id: ReconciliationId(73), extent_id: 37 }
37019 Sep 22 23:22:15.536 DEBG 73 Close extent 37
37020 Sep 22 23:22:15.536 DEBG 73 Close extent 37
37021 Sep 22 23:22:15.537 DEBG 73 Close extent 37
37022 Sep 22 23:22:15.537 DEBG [2] It's time to notify for 73
37023 Sep 22 23:22:15.537 INFO Completion from [2] id:73 status:true
37024 Sep 22 23:22:15.537 INFO [74/752] Repair commands completed
37025 Sep 22 23:22:15.537 INFO Pop front: ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37026 Sep 22 23:22:15.537 INFO Sent repair work, now wait for resp
37027 Sep 22 23:22:15.537 INFO [0] received reconcile message
37028 Sep 22 23:22:15.537 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37029 Sep 22 23:22:15.537 INFO [0] client ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37030 Sep 22 23:22:15.537 INFO [0] Sending repair request ReconciliationId(74)
37031 Sep 22 23:22:15.537 INFO [1] received reconcile message
37032 Sep 22 23:22:15.537 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37033 Sep 22 23:22:15.538 INFO [1] client ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37034 Sep 22 23:22:15.538 INFO [1] No action required ReconciliationId(74)
37035 Sep 22 23:22:15.538 INFO [2] received reconcile message
37036 Sep 22 23:22:15.538 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37037 Sep 22 23:22:15.538 INFO [2] client ExtentRepair { repair_id: ReconciliationId(74), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37038 Sep 22 23:22:15.538 INFO [2] No action required ReconciliationId(74)
37039 Sep 22 23:22:15.538 DEBG 74 Repair extent 37 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37040 Sep 22 23:22:15.538 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/025.copy"
37041 Sep 22 23:22:15.600 INFO accepted connection, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37042 Sep 22 23:22:15.600 TRCE incoming request, uri: /extent/37/files, method: GET, req_id: 7af24fd8-4dcb-4498-baed-2b5b2cad8bba, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37043 Sep 22 23:22:15.601 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/37/files, method: GET, req_id: 7af24fd8-4dcb-4498-baed-2b5b2cad8bba, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37044 Sep 22 23:22:15.601 INFO eid:37 Found repair files: ["025", "025.db"]
37045 Sep 22 23:22:15.601 TRCE incoming request, uri: /newextent/37/data, method: GET, req_id: fc418cd1-5c07-4cc5-94c5-22717f8bad8d, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37046 Sep 22 23:22:15.601 INFO request completed, latency_us: 320, response_code: 200, uri: /newextent/37/data, method: GET, req_id: fc418cd1-5c07-4cc5-94c5-22717f8bad8d, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37047 Sep 22 23:22:15.606 TRCE incoming request, uri: /newextent/37/db, method: GET, req_id: 0be68d3e-964d-47a7-a49b-7dbb2ceddaa8, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37048 Sep 22 23:22:15.606 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/37/db, method: GET, req_id: 0be68d3e-964d-47a7-a49b-7dbb2ceddaa8, remote_addr: 127.0.0.1:57514, local_addr: 127.0.0.1:52864, task: repair
37049 Sep 22 23:22:15.607 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/025.copy" to "/tmp/downstairs-zrMnlo6G/00/000/025.replace"
37050 Sep 22 23:22:15.608 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37051 Sep 22 23:22:15.608 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/025.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37052 Sep 22 23:22:15.609 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/025"
37053 Sep 22 23:22:15.609 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/025.db"
37054 Sep 22 23:22:15.609 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37055 Sep 22 23:22:15.609 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/025.replace" to "/tmp/downstairs-zrMnlo6G/00/000/025.completed"
37056 Sep 22 23:22:15.609 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37057 Sep 22 23:22:15.609 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37058 Sep 22 23:22:15.609 DEBG [0] It's time to notify for 74
37059 Sep 22 23:22:15.609 INFO Completion from [0] id:74 status:true
37060 Sep 22 23:22:15.609 INFO [75/752] Repair commands completed
37061 Sep 22 23:22:15.609 INFO Pop front: ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }, state: ClientData([New, New, New]) }
37062 Sep 22 23:22:15.609 INFO Sent repair work, now wait for resp
37063 Sep 22 23:22:15.609 INFO [0] received reconcile message
37064 Sep 22 23:22:15.609 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }, state: ClientData([InProgress, New, New]) }, : downstairs
37065 Sep 22 23:22:15.609 INFO [0] client ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }
37066 Sep 22 23:22:15.609 INFO [1] received reconcile message
37067 Sep 22 23:22:15.609 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37068 Sep 22 23:22:15.609 INFO [1] client ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }
37069 Sep 22 23:22:15.609 INFO [2] received reconcile message
37070 Sep 22 23:22:15.609 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37071 Sep 22 23:22:15.609 INFO [2] client ExtentReopen { repair_id: ReconciliationId(75), extent_id: 37 }
37072 Sep 22 23:22:15.610 DEBG 75 Reopen extent 37
37073 Sep 22 23:22:15.610 DEBG 75 Reopen extent 37
37074 Sep 22 23:22:15.611 DEBG 75 Reopen extent 37
37075 Sep 22 23:22:15.611 DEBG [2] It's time to notify for 75
37076 Sep 22 23:22:15.611 INFO Completion from [2] id:75 status:true
37077 Sep 22 23:22:15.611 INFO [76/752] Repair commands completed
37078 Sep 22 23:22:15.611 INFO Pop front: ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37079 Sep 22 23:22:15.611 INFO Sent repair work, now wait for resp
37080 Sep 22 23:22:15.611 INFO [0] received reconcile message
37081 Sep 22 23:22:15.611 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37082 Sep 22 23:22:15.611 INFO [0] client ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37083 Sep 22 23:22:15.611 INFO [1] received reconcile message
37084 Sep 22 23:22:15.611 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37085 Sep 22 23:22:15.611 INFO [1] client ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37086 Sep 22 23:22:15.612 INFO [2] received reconcile message
37087 Sep 22 23:22:15.612 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37088 Sep 22 23:22:15.612 INFO [2] client ExtentFlush { repair_id: ReconciliationId(76), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37089 Sep 22 23:22:15.612 DEBG 76 Flush extent 150 with f:2 g:2
37090 Sep 22 23:22:15.612 DEBG Flush just extent 150 with f:2 and g:2
37091 Sep 22 23:22:15.612 DEBG [1] It's time to notify for 76
37092 Sep 22 23:22:15.612 INFO Completion from [1] id:76 status:true
37093 Sep 22 23:22:15.612 INFO [77/752] Repair commands completed
37094 Sep 22 23:22:15.612 INFO Pop front: ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }, state: ClientData([New, New, New]) }
37095 Sep 22 23:22:15.612 INFO Sent repair work, now wait for resp
37096 Sep 22 23:22:15.612 INFO [0] received reconcile message
37097 Sep 22 23:22:15.612 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }, state: ClientData([InProgress, New, New]) }, : downstairs
37098 Sep 22 23:22:15.612 INFO [0] client ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }
37099 Sep 22 23:22:15.612 INFO [1] received reconcile message
37100 Sep 22 23:22:15.612 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37101 Sep 22 23:22:15.612 INFO [1] client ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }
37102 Sep 22 23:22:15.612 INFO [2] received reconcile message
37103 Sep 22 23:22:15.612 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37104 Sep 22 23:22:15.612 INFO [2] client ExtentClose { repair_id: ReconciliationId(77), extent_id: 150 }
37105 Sep 22 23:22:15.612 DEBG 77 Close extent 150
37106 Sep 22 23:22:15.613 DEBG 77 Close extent 150
37107 Sep 22 23:22:15.613 DEBG 77 Close extent 150
37108 Sep 22 23:22:15.613 DEBG [2] It's time to notify for 77
37109 Sep 22 23:22:15.613 INFO Completion from [2] id:77 status:true
37110 Sep 22 23:22:15.613 INFO [78/752] Repair commands completed
37111 Sep 22 23:22:15.613 INFO Pop front: ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37112 Sep 22 23:22:15.613 INFO Sent repair work, now wait for resp
37113 Sep 22 23:22:15.613 INFO [0] received reconcile message
37114 Sep 22 23:22:15.613 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37115 Sep 22 23:22:15.613 INFO [0] client ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37116 Sep 22 23:22:15.613 INFO [0] Sending repair request ReconciliationId(78)
37117 Sep 22 23:22:15.613 INFO [1] received reconcile message
37118 Sep 22 23:22:15.614 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37119 Sep 22 23:22:15.614 INFO [1] client ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37120 Sep 22 23:22:15.614 INFO [1] No action required ReconciliationId(78)
37121 Sep 22 23:22:15.614 INFO [2] received reconcile message
37122 Sep 22 23:22:15.614 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37123 Sep 22 23:22:15.614 INFO [2] client ExtentRepair { repair_id: ReconciliationId(78), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37124 Sep 22 23:22:15.614 INFO [2] No action required ReconciliationId(78)
37125 Sep 22 23:22:15.614 DEBG 78 Repair extent 150 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37126 Sep 22 23:22:15.614 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/096.copy"
37127 Sep 22 23:22:15.678 INFO accepted connection, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37128 Sep 22 23:22:15.678 TRCE incoming request, uri: /extent/150/files, method: GET, req_id: e526fbce-6edb-4294-8c12-ba8212f7fad6, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37129 Sep 22 23:22:15.678 INFO request completed, latency_us: 206, response_code: 200, uri: /extent/150/files, method: GET, req_id: e526fbce-6edb-4294-8c12-ba8212f7fad6, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37130 Sep 22 23:22:15.678 INFO eid:150 Found repair files: ["096", "096.db"]
37131 Sep 22 23:22:15.679 TRCE incoming request, uri: /newextent/150/data, method: GET, req_id: bc0ecba7-668d-47f5-b3f0-02ead4ae64c8, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37132 Sep 22 23:22:15.679 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/150/data, method: GET, req_id: bc0ecba7-668d-47f5-b3f0-02ead4ae64c8, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37133 Sep 22 23:22:15.684 TRCE incoming request, uri: /newextent/150/db, method: GET, req_id: a38ed99e-fdb0-40d6-af21-1a177c6c6f1d, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37134 Sep 22 23:22:15.684 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/150/db, method: GET, req_id: a38ed99e-fdb0-40d6-af21-1a177c6c6f1d, remote_addr: 127.0.0.1:45268, local_addr: 127.0.0.1:52864, task: repair
37135 Sep 22 23:22:15.685 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/096.copy" to "/tmp/downstairs-zrMnlo6G/00/000/096.replace"
37136 Sep 22 23:22:15.685 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37137 Sep 22 23:22:15.686 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/096.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37138 Sep 22 23:22:15.686 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/096"
37139 Sep 22 23:22:15.687 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/096.db"
37140 Sep 22 23:22:15.687 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37141 Sep 22 23:22:15.687 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/096.replace" to "/tmp/downstairs-zrMnlo6G/00/000/096.completed"
37142 Sep 22 23:22:15.687 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37143 Sep 22 23:22:15.687 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37144 Sep 22 23:22:15.687 DEBG [0] It's time to notify for 78
37145 Sep 22 23:22:15.687 INFO Completion from [0] id:78 status:true
37146 Sep 22 23:22:15.687 INFO [79/752] Repair commands completed
37147 Sep 22 23:22:15.687 INFO Pop front: ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }, state: ClientData([New, New, New]) }
37148 Sep 22 23:22:15.687 INFO Sent repair work, now wait for resp
37149 Sep 22 23:22:15.687 INFO [0] received reconcile message
37150 Sep 22 23:22:15.687 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }, state: ClientData([InProgress, New, New]) }, : downstairs
37151 Sep 22 23:22:15.687 INFO [0] client ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }
37152 Sep 22 23:22:15.687 INFO [1] received reconcile message
37153 Sep 22 23:22:15.687 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37154 Sep 22 23:22:15.687 INFO [1] client ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }
37155 Sep 22 23:22:15.687 INFO [2] received reconcile message
37156 Sep 22 23:22:15.687 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37157 Sep 22 23:22:15.687 INFO [2] client ExtentReopen { repair_id: ReconciliationId(79), extent_id: 150 }
37158 Sep 22 23:22:15.687 DEBG 79 Reopen extent 150
37159 Sep 22 23:22:15.688 DEBG 79 Reopen extent 150
37160 Sep 22 23:22:15.689 DEBG 79 Reopen extent 150
37161 Sep 22 23:22:15.689 DEBG [2] It's time to notify for 79
37162 Sep 22 23:22:15.689 INFO Completion from [2] id:79 status:true
37163 Sep 22 23:22:15.689 INFO [80/752] Repair commands completed
37164 Sep 22 23:22:15.689 INFO Pop front: ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37165 Sep 22 23:22:15.689 INFO Sent repair work, now wait for resp
37166 Sep 22 23:22:15.689 INFO [0] received reconcile message
37167 Sep 22 23:22:15.689 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37168 Sep 22 23:22:15.689 INFO [0] client ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37169 Sep 22 23:22:15.689 INFO [1] received reconcile message
37170 Sep 22 23:22:15.689 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37171 Sep 22 23:22:15.689 INFO [1] client ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37172 Sep 22 23:22:15.689 INFO [2] received reconcile message
37173 Sep 22 23:22:15.689 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37174 Sep 22 23:22:15.689 INFO [2] client ExtentFlush { repair_id: ReconciliationId(80), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37175 Sep 22 23:22:15.690 DEBG 80 Flush extent 157 with f:2 g:2
37176 Sep 22 23:22:15.690 DEBG Flush just extent 157 with f:2 and g:2
37177 Sep 22 23:22:15.690 DEBG [1] It's time to notify for 80
37178 Sep 22 23:22:15.690 INFO Completion from [1] id:80 status:true
37179 Sep 22 23:22:15.690 INFO [81/752] Repair commands completed
37180 Sep 22 23:22:15.690 INFO Pop front: ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }, state: ClientData([New, New, New]) }
37181 Sep 22 23:22:15.690 INFO Sent repair work, now wait for resp
37182 Sep 22 23:22:15.690 INFO [0] received reconcile message
37183 Sep 22 23:22:15.690 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }, state: ClientData([InProgress, New, New]) }, : downstairs
37184 Sep 22 23:22:15.690 INFO [0] client ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }
37185 Sep 22 23:22:15.690 INFO [1] received reconcile message
37186 Sep 22 23:22:15.690 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37187 Sep 22 23:22:15.690 INFO [1] client ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }
37188 Sep 22 23:22:15.690 INFO [2] received reconcile message
37189 Sep 22 23:22:15.690 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37190 Sep 22 23:22:15.690 INFO [2] client ExtentClose { repair_id: ReconciliationId(81), extent_id: 157 }
37191 Sep 22 23:22:15.690 DEBG 81 Close extent 157
37192 Sep 22 23:22:15.690 DEBG 81 Close extent 157
37193 Sep 22 23:22:15.691 DEBG 81 Close extent 157
37194 Sep 22 23:22:15.691 DEBG [2] It's time to notify for 81
37195 Sep 22 23:22:15.691 INFO Completion from [2] id:81 status:true
37196 Sep 22 23:22:15.691 INFO [82/752] Repair commands completed
37197 Sep 22 23:22:15.691 INFO Pop front: ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37198 Sep 22 23:22:15.691 INFO Sent repair work, now wait for resp
37199 Sep 22 23:22:15.691 INFO [0] received reconcile message
37200 Sep 22 23:22:15.691 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37201 Sep 22 23:22:15.691 INFO [0] client ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37202 Sep 22 23:22:15.691 INFO [0] Sending repair request ReconciliationId(82)
37203 Sep 22 23:22:15.691 INFO [1] received reconcile message
37204 Sep 22 23:22:15.691 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37205 Sep 22 23:22:15.691 INFO [1] client ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37206 Sep 22 23:22:15.691 INFO [1] No action required ReconciliationId(82)
37207 Sep 22 23:22:15.692 INFO [2] received reconcile message
37208 Sep 22 23:22:15.692 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37209 Sep 22 23:22:15.692 INFO [2] client ExtentRepair { repair_id: ReconciliationId(82), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37210 Sep 22 23:22:15.692 INFO [2] No action required ReconciliationId(82)
37211 Sep 22 23:22:15.692 DEBG 82 Repair extent 157 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37212 Sep 22 23:22:15.692 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/09D.copy"
37213 Sep 22 23:22:15.705 ERRO [1] job id 1074 saw error GenericError("test error")
37214 Sep 22 23:22:15.755 INFO accepted connection, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37215 Sep 22 23:22:15.755 TRCE incoming request, uri: /extent/157/files, method: GET, req_id: 8c12b9be-fe98-4557-92f3-e30d68c5feca, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37216 Sep 22 23:22:15.756 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/157/files, method: GET, req_id: 8c12b9be-fe98-4557-92f3-e30d68c5feca, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37217 Sep 22 23:22:15.756 INFO eid:157 Found repair files: ["09D", "09D.db"]
37218 Sep 22 23:22:15.756 TRCE incoming request, uri: /newextent/157/data, method: GET, req_id: 26b5106a-4395-4895-a18c-07c4e90d4627, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37219 Sep 22 23:22:15.756 INFO request completed, latency_us: 321, response_code: 200, uri: /newextent/157/data, method: GET, req_id: 26b5106a-4395-4895-a18c-07c4e90d4627, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37220 Sep 22 23:22:15.761 TRCE incoming request, uri: /newextent/157/db, method: GET, req_id: 9ea01387-2957-4061-b26b-39bfa8708fe1, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37221 Sep 22 23:22:15.761 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/157/db, method: GET, req_id: 9ea01387-2957-4061-b26b-39bfa8708fe1, remote_addr: 127.0.0.1:64720, local_addr: 127.0.0.1:52864, task: repair
37222 Sep 22 23:22:15.762 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/09D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/09D.replace"
37223 Sep 22 23:22:15.762 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37224 Sep 22 23:22:15.763 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/09D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37225 Sep 22 23:22:15.763 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09D"
37226 Sep 22 23:22:15.763 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09D.db"
37227 Sep 22 23:22:15.763 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37228 Sep 22 23:22:15.763 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/09D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/09D.completed"
37229 Sep 22 23:22:15.763 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37230 Sep 22 23:22:15.764 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37231 Sep 22 23:22:15.764 DEBG [0] It's time to notify for 82
37232 Sep 22 23:22:15.764 INFO Completion from [0] id:82 status:true
37233 Sep 22 23:22:15.764 INFO [83/752] Repair commands completed
37234 Sep 22 23:22:15.764 INFO Pop front: ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }, state: ClientData([New, New, New]) }
37235 Sep 22 23:22:15.764 INFO Sent repair work, now wait for resp
37236 Sep 22 23:22:15.764 INFO [0] received reconcile message
37237 Sep 22 23:22:15.764 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }, state: ClientData([InProgress, New, New]) }, : downstairs
37238 Sep 22 23:22:15.764 INFO [0] client ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }
37239 Sep 22 23:22:15.764 INFO [1] received reconcile message
37240 Sep 22 23:22:15.764 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37241 Sep 22 23:22:15.764 INFO [1] client ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }
37242 Sep 22 23:22:15.764 INFO [2] received reconcile message
37243 Sep 22 23:22:15.764 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37244 Sep 22 23:22:15.764 INFO [2] client ExtentReopen { repair_id: ReconciliationId(83), extent_id: 157 }
37245 Sep 22 23:22:15.764 DEBG 83 Reopen extent 157
37246 Sep 22 23:22:15.765 DEBG 83 Reopen extent 157
37247 Sep 22 23:22:15.765 DEBG 83 Reopen extent 157
37248 Sep 22 23:22:15.766 DEBG [2] It's time to notify for 83
37249 Sep 22 23:22:15.766 INFO Completion from [2] id:83 status:true
37250 Sep 22 23:22:15.766 INFO [84/752] Repair commands completed
37251 Sep 22 23:22:15.766 INFO Pop front: ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37252 Sep 22 23:22:15.766 INFO Sent repair work, now wait for resp
37253 Sep 22 23:22:15.766 INFO [0] received reconcile message
37254 Sep 22 23:22:15.766 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37255 Sep 22 23:22:15.766 INFO [0] client ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37256 Sep 22 23:22:15.766 INFO [1] received reconcile message
37257 Sep 22 23:22:15.766 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37258 Sep 22 23:22:15.766 INFO [1] client ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37259 Sep 22 23:22:15.766 INFO [2] received reconcile message
37260 Sep 22 23:22:15.766 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37261 Sep 22 23:22:15.766 INFO [2] client ExtentFlush { repair_id: ReconciliationId(84), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37262 Sep 22 23:22:15.766 DEBG 84 Flush extent 89 with f:2 g:2
37263 Sep 22 23:22:15.766 DEBG Flush just extent 89 with f:2 and g:2
37264 Sep 22 23:22:15.767 DEBG [1] It's time to notify for 84
37265 Sep 22 23:22:15.767 INFO Completion from [1] id:84 status:true
37266 Sep 22 23:22:15.767 INFO [85/752] Repair commands completed
37267 Sep 22 23:22:15.767 INFO Pop front: ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }, state: ClientData([New, New, New]) }
37268 Sep 22 23:22:15.767 INFO Sent repair work, now wait for resp
37269 Sep 22 23:22:15.767 INFO [0] received reconcile message
37270 Sep 22 23:22:15.767 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }, state: ClientData([InProgress, New, New]) }, : downstairs
37271 Sep 22 23:22:15.767 INFO [0] client ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }
37272 Sep 22 23:22:15.767 INFO [1] received reconcile message
37273 Sep 22 23:22:15.767 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37274 Sep 22 23:22:15.767 INFO [1] client ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }
37275 Sep 22 23:22:15.767 INFO [2] received reconcile message
37276 Sep 22 23:22:15.767 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37277 Sep 22 23:22:15.767 INFO [2] client ExtentClose { repair_id: ReconciliationId(85), extent_id: 89 }
37278 Sep 22 23:22:15.767 DEBG 85 Close extent 89
37279 Sep 22 23:22:15.767 DEBG 85 Close extent 89
37280 Sep 22 23:22:15.768 DEBG 85 Close extent 89
37281 Sep 22 23:22:15.768 DEBG [2] It's time to notify for 85
37282 Sep 22 23:22:15.768 INFO Completion from [2] id:85 status:true
37283 Sep 22 23:22:15.768 INFO [86/752] Repair commands completed
37284 Sep 22 23:22:15.768 INFO Pop front: ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37285 Sep 22 23:22:15.768 INFO Sent repair work, now wait for resp
37286 Sep 22 23:22:15.768 INFO [0] received reconcile message
37287 Sep 22 23:22:15.768 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37288 Sep 22 23:22:15.768 INFO [0] client ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37289 Sep 22 23:22:15.768 INFO [0] Sending repair request ReconciliationId(86)
37290 Sep 22 23:22:15.768 INFO [1] received reconcile message
37291 Sep 22 23:22:15.768 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37292 Sep 22 23:22:15.768 INFO [1] client ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37293 Sep 22 23:22:15.768 INFO [1] No action required ReconciliationId(86)
37294 Sep 22 23:22:15.768 INFO [2] received reconcile message
37295 Sep 22 23:22:15.768 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37296 Sep 22 23:22:15.768 INFO [2] client ExtentRepair { repair_id: ReconciliationId(86), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37297 Sep 22 23:22:15.768 INFO [2] No action required ReconciliationId(86)
37298 Sep 22 23:22:15.768 DEBG 86 Repair extent 89 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37299 Sep 22 23:22:15.769 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/059.copy"
37300 Sep 22 23:22:15.832 INFO accepted connection, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37301 Sep 22 23:22:15.832 TRCE incoming request, uri: /extent/89/files, method: GET, req_id: 53b35606-20fb-47ef-9b2d-0ce135f7e118, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37302 Sep 22 23:22:15.832 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/89/files, method: GET, req_id: 53b35606-20fb-47ef-9b2d-0ce135f7e118, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37303 Sep 22 23:22:15.833 INFO eid:89 Found repair files: ["059", "059.db"]
37304 Sep 22 23:22:15.833 TRCE incoming request, uri: /newextent/89/data, method: GET, req_id: 7145ee05-1083-42cb-847d-7f05a954c766, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37305 Sep 22 23:22:15.833 INFO request completed, latency_us: 261, response_code: 200, uri: /newextent/89/data, method: GET, req_id: 7145ee05-1083-42cb-847d-7f05a954c766, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37306 Sep 22 23:22:15.838 TRCE incoming request, uri: /newextent/89/db, method: GET, req_id: 85225e57-66b7-429d-b744-19e112df8e3a, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37307 Sep 22 23:22:15.838 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/89/db, method: GET, req_id: 85225e57-66b7-429d-b744-19e112df8e3a, remote_addr: 127.0.0.1:35193, local_addr: 127.0.0.1:52864, task: repair
37308 Sep 22 23:22:15.839 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/059.copy" to "/tmp/downstairs-zrMnlo6G/00/000/059.replace"
37309 Sep 22 23:22:15.839 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37310 Sep 22 23:22:15.840 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/059.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37311 Sep 22 23:22:15.840 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/059"
37312 Sep 22 23:22:15.841 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/059.db"
37313 Sep 22 23:22:15.841 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37314 Sep 22 23:22:15.841 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/059.replace" to "/tmp/downstairs-zrMnlo6G/00/000/059.completed"
37315 Sep 22 23:22:15.841 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37316 Sep 22 23:22:15.841 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37317 Sep 22 23:22:15.841 DEBG [0] It's time to notify for 86
37318 Sep 22 23:22:15.841 INFO Completion from [0] id:86 status:true
37319 Sep 22 23:22:15.841 INFO [87/752] Repair commands completed
37320 Sep 22 23:22:15.841 INFO Pop front: ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }, state: ClientData([New, New, New]) }
37321 Sep 22 23:22:15.841 INFO Sent repair work, now wait for resp
37322 Sep 22 23:22:15.841 INFO [0] received reconcile message
37323 Sep 22 23:22:15.841 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }, state: ClientData([InProgress, New, New]) }, : downstairs
37324 Sep 22 23:22:15.841 INFO [0] client ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }
37325 Sep 22 23:22:15.841 INFO [1] received reconcile message
37326 Sep 22 23:22:15.841 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37327 Sep 22 23:22:15.841 INFO [1] client ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }
37328 Sep 22 23:22:15.841 INFO [2] received reconcile message
37329 Sep 22 23:22:15.841 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37330 Sep 22 23:22:15.841 INFO [2] client ExtentReopen { repair_id: ReconciliationId(87), extent_id: 89 }
37331 Sep 22 23:22:15.842 DEBG 87 Reopen extent 89
37332 Sep 22 23:22:15.842 DEBG 87 Reopen extent 89
37333 Sep 22 23:22:15.843 DEBG 87 Reopen extent 89
37334 Sep 22 23:22:15.843 DEBG [2] It's time to notify for 87
37335 Sep 22 23:22:15.843 INFO Completion from [2] id:87 status:true
37336 Sep 22 23:22:15.843 INFO [88/752] Repair commands completed
37337 Sep 22 23:22:15.843 INFO Pop front: ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37338 Sep 22 23:22:15.843 INFO Sent repair work, now wait for resp
37339 Sep 22 23:22:15.843 INFO [0] received reconcile message
37340 Sep 22 23:22:15.843 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37341 Sep 22 23:22:15.843 INFO [0] client ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37342 Sep 22 23:22:15.843 INFO [1] received reconcile message
37343 Sep 22 23:22:15.843 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37344 Sep 22 23:22:15.843 INFO [1] client ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37345 Sep 22 23:22:15.843 INFO [2] received reconcile message
37346 Sep 22 23:22:15.843 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37347 Sep 22 23:22:15.844 INFO [2] client ExtentFlush { repair_id: ReconciliationId(88), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37348 Sep 22 23:22:15.844 DEBG 88 Flush extent 163 with f:2 g:2
37349 Sep 22 23:22:15.844 DEBG Flush just extent 163 with f:2 and g:2
37350 Sep 22 23:22:15.844 DEBG [1] It's time to notify for 88
37351 Sep 22 23:22:15.844 INFO Completion from [1] id:88 status:true
37352 Sep 22 23:22:15.844 INFO [89/752] Repair commands completed
37353 Sep 22 23:22:15.844 INFO Pop front: ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }, state: ClientData([New, New, New]) }
37354 Sep 22 23:22:15.844 INFO Sent repair work, now wait for resp
37355 Sep 22 23:22:15.844 INFO [0] received reconcile message
37356 Sep 22 23:22:15.844 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }, state: ClientData([InProgress, New, New]) }, : downstairs
37357 Sep 22 23:22:15.844 INFO [0] client ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }
37358 Sep 22 23:22:15.844 INFO [1] received reconcile message
37359 Sep 22 23:22:15.844 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37360 Sep 22 23:22:15.844 INFO [1] client ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }
37361 Sep 22 23:22:15.844 INFO [2] received reconcile message
37362 Sep 22 23:22:15.844 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37363 Sep 22 23:22:15.844 INFO [2] client ExtentClose { repair_id: ReconciliationId(89), extent_id: 163 }
37364 Sep 22 23:22:15.844 DEBG 89 Close extent 163
37365 Sep 22 23:22:15.844 DEBG 89 Close extent 163
37366 Sep 22 23:22:15.845 DEBG 89 Close extent 163
37367 Sep 22 23:22:15.845 DEBG [2] It's time to notify for 89
37368 Sep 22 23:22:15.845 INFO Completion from [2] id:89 status:true
37369 Sep 22 23:22:15.845 INFO [90/752] Repair commands completed
37370 Sep 22 23:22:15.845 INFO Pop front: ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37371 Sep 22 23:22:15.845 INFO Sent repair work, now wait for resp
37372 Sep 22 23:22:15.845 INFO [0] received reconcile message
37373 Sep 22 23:22:15.845 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37374 Sep 22 23:22:15.845 INFO [0] client ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37375 Sep 22 23:22:15.845 INFO [0] Sending repair request ReconciliationId(90)
37376 Sep 22 23:22:15.845 INFO [1] received reconcile message
37377 Sep 22 23:22:15.845 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37378 Sep 22 23:22:15.845 INFO [1] client ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37379 Sep 22 23:22:15.845 INFO [1] No action required ReconciliationId(90)
37380 Sep 22 23:22:15.846 INFO [2] received reconcile message
37381 Sep 22 23:22:15.846 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37382 Sep 22 23:22:15.846 INFO [2] client ExtentRepair { repair_id: ReconciliationId(90), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37383 Sep 22 23:22:15.846 INFO [2] No action required ReconciliationId(90)
37384 Sep 22 23:22:15.846 DEBG 90 Repair extent 163 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37385 Sep 22 23:22:15.846 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A3.copy"
37386 Sep 22 23:22:15.911 INFO accepted connection, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37387 Sep 22 23:22:15.911 TRCE incoming request, uri: /extent/163/files, method: GET, req_id: 4b8df42a-63fb-4714-b9b9-63311dfe1784, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37388 Sep 22 23:22:15.911 INFO request completed, latency_us: 252, response_code: 200, uri: /extent/163/files, method: GET, req_id: 4b8df42a-63fb-4714-b9b9-63311dfe1784, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37389 Sep 22 23:22:15.912 INFO eid:163 Found repair files: ["0A3", "0A3.db"]
37390 Sep 22 23:22:15.912 TRCE incoming request, uri: /newextent/163/data, method: GET, req_id: b21bef45-b759-4fbe-94ac-c94f7aa6227f, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37391 Sep 22 23:22:15.912 INFO request completed, latency_us: 273, response_code: 200, uri: /newextent/163/data, method: GET, req_id: b21bef45-b759-4fbe-94ac-c94f7aa6227f, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37392 Sep 22 23:22:15.917 TRCE incoming request, uri: /newextent/163/db, method: GET, req_id: fd32cda4-b468-4212-b722-9f9da9789063, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37393 Sep 22 23:22:15.917 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/163/db, method: GET, req_id: fd32cda4-b468-4212-b722-9f9da9789063, remote_addr: 127.0.0.1:41776, local_addr: 127.0.0.1:52864, task: repair
37394 Sep 22 23:22:15.918 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A3.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A3.replace"
37395 Sep 22 23:22:15.918 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37396 Sep 22 23:22:15.919 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A3.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37397 Sep 22 23:22:15.919 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A3"
37398 Sep 22 23:22:15.920 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A3.db"
37399 Sep 22 23:22:15.920 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37400 Sep 22 23:22:15.920 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A3.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A3.completed"
37401 Sep 22 23:22:15.920 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37402 Sep 22 23:22:15.920 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37403 Sep 22 23:22:15.920 DEBG [0] It's time to notify for 90
37404 Sep 22 23:22:15.920 INFO Completion from [0] id:90 status:true
37405 Sep 22 23:22:15.920 INFO [91/752] Repair commands completed
37406 Sep 22 23:22:15.920 INFO Pop front: ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }, state: ClientData([New, New, New]) }
37407 Sep 22 23:22:15.920 INFO Sent repair work, now wait for resp
37408 Sep 22 23:22:15.920 INFO [0] received reconcile message
37409 Sep 22 23:22:15.920 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }, state: ClientData([InProgress, New, New]) }, : downstairs
37410 Sep 22 23:22:15.920 INFO [0] client ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }
37411 Sep 22 23:22:15.920 INFO [1] received reconcile message
37412 Sep 22 23:22:15.920 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37413 Sep 22 23:22:15.920 INFO [1] client ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }
37414 Sep 22 23:22:15.920 INFO [2] received reconcile message
37415 Sep 22 23:22:15.920 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37416 Sep 22 23:22:15.920 INFO [2] client ExtentReopen { repair_id: ReconciliationId(91), extent_id: 163 }
37417 Sep 22 23:22:15.921 DEBG 91 Reopen extent 163
37418 Sep 22 23:22:15.921 DEBG 91 Reopen extent 163
37419 Sep 22 23:22:15.922 DEBG 91 Reopen extent 163
37420 Sep 22 23:22:15.922 DEBG [2] It's time to notify for 91
37421 Sep 22 23:22:15.922 INFO Completion from [2] id:91 status:true
37422 Sep 22 23:22:15.922 INFO [92/752] Repair commands completed
37423 Sep 22 23:22:15.922 INFO Pop front: ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37424 Sep 22 23:22:15.922 INFO Sent repair work, now wait for resp
37425 Sep 22 23:22:15.923 INFO [0] received reconcile message
37426 Sep 22 23:22:15.923 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37427 Sep 22 23:22:15.923 INFO [0] client ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37428 Sep 22 23:22:15.923 INFO [1] received reconcile message
37429 Sep 22 23:22:15.923 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37430 Sep 22 23:22:15.923 INFO [1] client ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37431 Sep 22 23:22:15.923 INFO [2] received reconcile message
37432 Sep 22 23:22:15.923 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37433 Sep 22 23:22:15.923 INFO [2] client ExtentFlush { repair_id: ReconciliationId(92), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37434 Sep 22 23:22:15.923 DEBG 92 Flush extent 73 with f:2 g:2
37435 Sep 22 23:22:15.923 DEBG Flush just extent 73 with f:2 and g:2
37436 Sep 22 23:22:15.923 DEBG [1] It's time to notify for 92
37437 Sep 22 23:22:15.923 INFO Completion from [1] id:92 status:true
37438 Sep 22 23:22:15.923 INFO [93/752] Repair commands completed
37439 Sep 22 23:22:15.923 INFO Pop front: ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }, state: ClientData([New, New, New]) }
37440 Sep 22 23:22:15.923 INFO Sent repair work, now wait for resp
37441 Sep 22 23:22:15.923 INFO [0] received reconcile message
37442 Sep 22 23:22:15.923 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }, state: ClientData([InProgress, New, New]) }, : downstairs
37443 Sep 22 23:22:15.923 INFO [0] client ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }
37444 Sep 22 23:22:15.923 INFO [1] received reconcile message
37445 Sep 22 23:22:15.923 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37446 Sep 22 23:22:15.923 INFO [1] client ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }
37447 Sep 22 23:22:15.923 INFO [2] received reconcile message
37448 Sep 22 23:22:15.923 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37449 Sep 22 23:22:15.923 INFO [2] client ExtentClose { repair_id: ReconciliationId(93), extent_id: 73 }
37450 Sep 22 23:22:15.923 DEBG 93 Close extent 73
37451 Sep 22 23:22:15.924 DEBG 93 Close extent 73
37452 Sep 22 23:22:15.924 DEBG 93 Close extent 73
37453 Sep 22 23:22:15.924 DEBG [2] It's time to notify for 93
37454 Sep 22 23:22:15.924 INFO Completion from [2] id:93 status:true
37455 Sep 22 23:22:15.924 INFO [94/752] Repair commands completed
37456 Sep 22 23:22:15.924 INFO Pop front: ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37457 Sep 22 23:22:15.925 INFO Sent repair work, now wait for resp
37458 Sep 22 23:22:15.925 INFO [0] received reconcile message
37459 Sep 22 23:22:15.925 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37460 Sep 22 23:22:15.925 INFO [0] client ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37461 Sep 22 23:22:15.925 INFO [0] Sending repair request ReconciliationId(94)
37462 Sep 22 23:22:15.925 INFO [1] received reconcile message
37463 Sep 22 23:22:15.925 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37464 Sep 22 23:22:15.925 INFO [1] client ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37465 Sep 22 23:22:15.925 INFO [1] No action required ReconciliationId(94)
37466 Sep 22 23:22:15.925 INFO [2] received reconcile message
37467 Sep 22 23:22:15.925 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37468 Sep 22 23:22:15.925 INFO [2] client ExtentRepair { repair_id: ReconciliationId(94), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37469 Sep 22 23:22:15.925 INFO [2] No action required ReconciliationId(94)
37470 Sep 22 23:22:15.925 DEBG 94 Repair extent 73 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37471 Sep 22 23:22:15.925 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/049.copy"
37472 Sep 22 23:22:15.988 INFO accepted connection, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37473 Sep 22 23:22:15.989 TRCE incoming request, uri: /extent/73/files, method: GET, req_id: 8b2b40d1-7e8f-4c9f-ab22-ac6c68c226a6, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37474 Sep 22 23:22:15.989 INFO request completed, latency_us: 207, response_code: 200, uri: /extent/73/files, method: GET, req_id: 8b2b40d1-7e8f-4c9f-ab22-ac6c68c226a6, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37475 Sep 22 23:22:15.989 INFO eid:73 Found repair files: ["049", "049.db"]
37476 Sep 22 23:22:15.989 TRCE incoming request, uri: /newextent/73/data, method: GET, req_id: eaef0f02-7fa8-4ef7-b9e6-312d13eb4d7d, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37477 Sep 22 23:22:15.990 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/73/data, method: GET, req_id: eaef0f02-7fa8-4ef7-b9e6-312d13eb4d7d, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37478 Sep 22 23:22:15.994 TRCE incoming request, uri: /newextent/73/db, method: GET, req_id: 3457bfc3-78ad-49c3-b233-c0783c537d4f, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37479 Sep 22 23:22:15.994 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/73/db, method: GET, req_id: 3457bfc3-78ad-49c3-b233-c0783c537d4f, remote_addr: 127.0.0.1:62296, local_addr: 127.0.0.1:52864, task: repair
37480 Sep 22 23:22:15.996 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/049.copy" to "/tmp/downstairs-zrMnlo6G/00/000/049.replace"
37481 Sep 22 23:22:15.996 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37482 Sep 22 23:22:15.996 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/049.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37483 Sep 22 23:22:15.997 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/049"
37484 Sep 22 23:22:15.997 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/049.db"
37485 Sep 22 23:22:15.997 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37486 Sep 22 23:22:15.997 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/049.replace" to "/tmp/downstairs-zrMnlo6G/00/000/049.completed"
37487 Sep 22 23:22:15.997 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37488 Sep 22 23:22:15.997 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37489 Sep 22 23:22:15.997 DEBG [0] It's time to notify for 94
37490 Sep 22 23:22:15.997 INFO Completion from [0] id:94 status:true
37491 Sep 22 23:22:15.997 INFO [95/752] Repair commands completed
37492 Sep 22 23:22:15.997 INFO Pop front: ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }, state: ClientData([New, New, New]) }
37493 Sep 22 23:22:15.997 INFO Sent repair work, now wait for resp
37494 Sep 22 23:22:15.997 INFO [0] received reconcile message
37495 Sep 22 23:22:15.997 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }, state: ClientData([InProgress, New, New]) }, : downstairs
37496 Sep 22 23:22:15.997 INFO [0] client ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }
37497 Sep 22 23:22:15.997 INFO [1] received reconcile message
37498 Sep 22 23:22:15.997 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37499 Sep 22 23:22:15.997 INFO [1] client ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }
37500 Sep 22 23:22:15.997 INFO [2] received reconcile message
37501 Sep 22 23:22:15.998 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37502 Sep 22 23:22:15.998 INFO [2] client ExtentReopen { repair_id: ReconciliationId(95), extent_id: 73 }
37503 Sep 22 23:22:15.998 DEBG 95 Reopen extent 73
37504 Sep 22 23:22:15.998 DEBG 95 Reopen extent 73
37505 Sep 22 23:22:15.999 DEBG 95 Reopen extent 73
37506 Sep 22 23:22:15.999 DEBG [2] It's time to notify for 95
37507 Sep 22 23:22:15.999 INFO Completion from [2] id:95 status:true
37508 Sep 22 23:22:15.999 INFO [96/752] Repair commands completed
37509 Sep 22 23:22:15.999 INFO Pop front: ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37510 Sep 22 23:22:15.999 INFO Sent repair work, now wait for resp
37511 Sep 22 23:22:16.000 INFO [0] received reconcile message
37512 Sep 22 23:22:16.000 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37513 Sep 22 23:22:16.000 INFO [0] client ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37514 Sep 22 23:22:16.000 INFO [1] received reconcile message
37515 Sep 22 23:22:16.000 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37516 Sep 22 23:22:16.000 INFO [1] client ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37517 Sep 22 23:22:16.000 INFO [2] received reconcile message
37518 Sep 22 23:22:16.000 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37519 Sep 22 23:22:16.000 INFO [2] client ExtentFlush { repair_id: ReconciliationId(96), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37520 Sep 22 23:22:16.000 DEBG 96 Flush extent 168 with f:2 g:2
37521 Sep 22 23:22:16.000 DEBG Flush just extent 168 with f:2 and g:2
37522 Sep 22 23:22:16.000 DEBG [1] It's time to notify for 96
37523 Sep 22 23:22:16.000 INFO Completion from [1] id:96 status:true
37524 Sep 22 23:22:16.000 INFO [97/752] Repair commands completed
37525 Sep 22 23:22:16.000 INFO Pop front: ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }, state: ClientData([New, New, New]) }
37526 Sep 22 23:22:16.000 INFO Sent repair work, now wait for resp
37527 Sep 22 23:22:16.000 INFO [0] received reconcile message
37528 Sep 22 23:22:16.000 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }, state: ClientData([InProgress, New, New]) }, : downstairs
37529 Sep 22 23:22:16.000 INFO [0] client ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }
37530 Sep 22 23:22:16.000 INFO [1] received reconcile message
37531 Sep 22 23:22:16.000 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37532 Sep 22 23:22:16.000 INFO [1] client ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }
37533 Sep 22 23:22:16.000 INFO [2] received reconcile message
37534 Sep 22 23:22:16.000 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37535 Sep 22 23:22:16.000 INFO [2] client ExtentClose { repair_id: ReconciliationId(97), extent_id: 168 }
37536 Sep 22 23:22:16.000 DEBG 97 Close extent 168
37537 Sep 22 23:22:16.001 DEBG 97 Close extent 168
37538 Sep 22 23:22:16.001 DEBG 97 Close extent 168
37539 Sep 22 23:22:16.001 DEBG [2] It's time to notify for 97
37540 Sep 22 23:22:16.001 INFO Completion from [2] id:97 status:true
37541 Sep 22 23:22:16.001 INFO [98/752] Repair commands completed
37542 Sep 22 23:22:16.001 INFO Pop front: ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37543 Sep 22 23:22:16.002 INFO Sent repair work, now wait for resp
37544 Sep 22 23:22:16.002 INFO [0] received reconcile message
37545 Sep 22 23:22:16.002 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37546 Sep 22 23:22:16.002 INFO [0] client ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37547 Sep 22 23:22:16.002 INFO [0] Sending repair request ReconciliationId(98)
37548 Sep 22 23:22:16.002 INFO [1] received reconcile message
37549 Sep 22 23:22:16.002 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37550 Sep 22 23:22:16.002 INFO [1] client ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37551 Sep 22 23:22:16.002 INFO [1] No action required ReconciliationId(98)
37552 Sep 22 23:22:16.002 INFO [2] received reconcile message
37553 Sep 22 23:22:16.002 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37554 Sep 22 23:22:16.002 INFO [2] client ExtentRepair { repair_id: ReconciliationId(98), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37555 Sep 22 23:22:16.002 INFO [2] No action required ReconciliationId(98)
37556 Sep 22 23:22:16.002 DEBG 98 Repair extent 168 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37557 Sep 22 23:22:16.002 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A8.copy"
37558 Sep 22 23:22:16.040 DEBG up_ds_listen was notified
37559 Sep 22 23:22:16.040 DEBG up_ds_listen process 1074
37560 Sep 22 23:22:16.040 DEBG [A] ack job 1074:75, : downstairs
37561 Sep 22 23:22:16.040 DEBG up_ds_listen checked 1 jobs, back to waiting
37562 Sep 22 23:22:16.046 DEBG Read :1075 deps:[JobId(1074)] res:true
37563 Sep 22 23:22:16.066 INFO accepted connection, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37564 Sep 22 23:22:16.067 TRCE incoming request, uri: /extent/168/files, method: GET, req_id: a13f5a05-99f7-44db-a5d3-d2371a25888f, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37565 Sep 22 23:22:16.067 INFO request completed, latency_us: 246, response_code: 200, uri: /extent/168/files, method: GET, req_id: a13f5a05-99f7-44db-a5d3-d2371a25888f, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37566 Sep 22 23:22:16.067 INFO eid:168 Found repair files: ["0A8", "0A8.db"]
37567 Sep 22 23:22:16.068 TRCE incoming request, uri: /newextent/168/data, method: GET, req_id: dae32142-aefa-43dd-8c1a-52908cab0f3c, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37568 Sep 22 23:22:16.068 INFO request completed, latency_us: 336, response_code: 200, uri: /newextent/168/data, method: GET, req_id: dae32142-aefa-43dd-8c1a-52908cab0f3c, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37569 Sep 22 23:22:16.070 INFO [lossy] skipping 1076
37570 Sep 22 23:22:16.070 DEBG Flush :1076 extent_limit None deps:[JobId(1075), JobId(1074)] res:true f:27 g:1
37571 Sep 22 23:22:16.070 INFO [lossy] sleeping 1 second
37572 Sep 22 23:22:16.073 TRCE incoming request, uri: /newextent/168/db, method: GET, req_id: 58bd353f-e1d4-48be-9153-a4df2ce2e901, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37573 Sep 22 23:22:16.073 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/168/db, method: GET, req_id: 58bd353f-e1d4-48be-9153-a4df2ce2e901, remote_addr: 127.0.0.1:52304, local_addr: 127.0.0.1:52864, task: repair
37574 Sep 22 23:22:16.074 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A8.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A8.replace"
37575 Sep 22 23:22:16.074 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37576 Sep 22 23:22:16.075 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A8.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37577 Sep 22 23:22:16.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A8"
37578 Sep 22 23:22:16.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A8.db"
37579 Sep 22 23:22:16.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37580 Sep 22 23:22:16.075 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A8.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A8.completed"
37581 Sep 22 23:22:16.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37582 Sep 22 23:22:16.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37583 Sep 22 23:22:16.076 DEBG [0] It's time to notify for 98
37584 Sep 22 23:22:16.076 INFO Completion from [0] id:98 status:true
37585 Sep 22 23:22:16.076 INFO [99/752] Repair commands completed
37586 Sep 22 23:22:16.076 INFO Pop front: ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }, state: ClientData([New, New, New]) }
37587 Sep 22 23:22:16.076 INFO Sent repair work, now wait for resp
37588 Sep 22 23:22:16.076 INFO [0] received reconcile message
37589 Sep 22 23:22:16.076 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }, state: ClientData([InProgress, New, New]) }, : downstairs
37590 Sep 22 23:22:16.076 INFO [0] client ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }
37591 Sep 22 23:22:16.076 INFO [1] received reconcile message
37592 Sep 22 23:22:16.076 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37593 Sep 22 23:22:16.076 INFO [1] client ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }
37594 Sep 22 23:22:16.076 INFO [2] received reconcile message
37595 Sep 22 23:22:16.076 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37596 Sep 22 23:22:16.076 INFO [2] client ExtentReopen { repair_id: ReconciliationId(99), extent_id: 168 }
37597 Sep 22 23:22:16.076 DEBG 99 Reopen extent 168
37598 Sep 22 23:22:16.077 DEBG 99 Reopen extent 168
37599 Sep 22 23:22:16.077 DEBG 99 Reopen extent 168
37600 Sep 22 23:22:16.078 DEBG [2] It's time to notify for 99
37601 Sep 22 23:22:16.078 INFO Completion from [2] id:99 status:true
37602 Sep 22 23:22:16.078 INFO [100/752] Repair commands completed
37603 Sep 22 23:22:16.078 INFO Pop front: ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37604 Sep 22 23:22:16.078 INFO Sent repair work, now wait for resp
37605 Sep 22 23:22:16.078 INFO [0] received reconcile message
37606 Sep 22 23:22:16.078 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37607 Sep 22 23:22:16.078 INFO [0] client ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37608 Sep 22 23:22:16.078 INFO [1] received reconcile message
37609 Sep 22 23:22:16.078 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37610 Sep 22 23:22:16.078 INFO [1] client ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37611 Sep 22 23:22:16.078 INFO [2] received reconcile message
37612 Sep 22 23:22:16.078 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37613 Sep 22 23:22:16.078 INFO [2] client ExtentFlush { repair_id: ReconciliationId(100), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37614 Sep 22 23:22:16.078 DEBG 100 Flush extent 176 with f:2 g:2
37615 Sep 22 23:22:16.078 DEBG Flush just extent 176 with f:2 and g:2
37616 Sep 22 23:22:16.079 DEBG [1] It's time to notify for 100
37617 Sep 22 23:22:16.079 INFO Completion from [1] id:100 status:true
37618 Sep 22 23:22:16.079 INFO [101/752] Repair commands completed
37619 Sep 22 23:22:16.079 INFO Pop front: ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }, state: ClientData([New, New, New]) }
37620 Sep 22 23:22:16.079 INFO Sent repair work, now wait for resp
37621 Sep 22 23:22:16.079 INFO [0] received reconcile message
37622 Sep 22 23:22:16.079 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }, state: ClientData([InProgress, New, New]) }, : downstairs
37623 Sep 22 23:22:16.079 INFO [0] client ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }
37624 Sep 22 23:22:16.079 INFO [1] received reconcile message
37625 Sep 22 23:22:16.079 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37626 Sep 22 23:22:16.079 INFO [1] client ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }
37627 Sep 22 23:22:16.079 INFO [2] received reconcile message
37628 Sep 22 23:22:16.079 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37629 Sep 22 23:22:16.079 INFO [2] client ExtentClose { repair_id: ReconciliationId(101), extent_id: 176 }
37630 Sep 22 23:22:16.079 DEBG 101 Close extent 176
37631 Sep 22 23:22:16.079 DEBG 101 Close extent 176
37632 Sep 22 23:22:16.080 DEBG 101 Close extent 176
37633 Sep 22 23:22:16.080 DEBG [2] It's time to notify for 101
37634 Sep 22 23:22:16.080 INFO Completion from [2] id:101 status:true
37635 Sep 22 23:22:16.080 INFO [102/752] Repair commands completed
37636 Sep 22 23:22:16.080 INFO Pop front: ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37637 Sep 22 23:22:16.080 INFO Sent repair work, now wait for resp
37638 Sep 22 23:22:16.080 INFO [0] received reconcile message
37639 Sep 22 23:22:16.080 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37640 Sep 22 23:22:16.080 INFO [0] client ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37641 Sep 22 23:22:16.080 INFO [0] Sending repair request ReconciliationId(102)
37642 Sep 22 23:22:16.080 INFO [1] received reconcile message
37643 Sep 22 23:22:16.080 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37644 Sep 22 23:22:16.080 INFO [1] client ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37645 Sep 22 23:22:16.080 INFO [1] No action required ReconciliationId(102)
37646 Sep 22 23:22:16.080 INFO [2] received reconcile message
37647 Sep 22 23:22:16.080 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37648 Sep 22 23:22:16.080 INFO [2] client ExtentRepair { repair_id: ReconciliationId(102), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37649 Sep 22 23:22:16.080 INFO [2] No action required ReconciliationId(102)
37650 Sep 22 23:22:16.081 DEBG 102 Repair extent 176 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37651 Sep 22 23:22:16.081 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B0.copy"
37652 Sep 22 23:22:16.142 INFO accepted connection, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37653 Sep 22 23:22:16.143 TRCE incoming request, uri: /extent/176/files, method: GET, req_id: 2feacb58-ca0f-4033-8e89-e42d03ba85cc, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37654 Sep 22 23:22:16.143 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/176/files, method: GET, req_id: 2feacb58-ca0f-4033-8e89-e42d03ba85cc, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37655 Sep 22 23:22:16.143 INFO eid:176 Found repair files: ["0B0", "0B0.db"]
37656 Sep 22 23:22:16.143 TRCE incoming request, uri: /newextent/176/data, method: GET, req_id: 6e17321f-43a4-4a58-8395-2966a1f3fe7c, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37657 Sep 22 23:22:16.144 INFO request completed, latency_us: 314, response_code: 200, uri: /newextent/176/data, method: GET, req_id: 6e17321f-43a4-4a58-8395-2966a1f3fe7c, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37658 Sep 22 23:22:16.148 TRCE incoming request, uri: /newextent/176/db, method: GET, req_id: 3b1f43a9-cbad-408b-9dcd-62926c7ce22a, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37659 Sep 22 23:22:16.149 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/176/db, method: GET, req_id: 3b1f43a9-cbad-408b-9dcd-62926c7ce22a, remote_addr: 127.0.0.1:54002, local_addr: 127.0.0.1:52864, task: repair
37660 Sep 22 23:22:16.150 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B0.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B0.replace"
37661 Sep 22 23:22:16.150 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37662 Sep 22 23:22:16.151 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B0.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37663 Sep 22 23:22:16.151 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B0"
37664 Sep 22 23:22:16.151 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B0.db"
37665 Sep 22 23:22:16.151 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37666 Sep 22 23:22:16.151 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B0.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B0.completed"
37667 Sep 22 23:22:16.151 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37668 Sep 22 23:22:16.151 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37669 Sep 22 23:22:16.151 DEBG [0] It's time to notify for 102
37670 Sep 22 23:22:16.151 INFO Completion from [0] id:102 status:true
37671 Sep 22 23:22:16.151 INFO [103/752] Repair commands completed
37672 Sep 22 23:22:16.152 INFO Pop front: ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }, state: ClientData([New, New, New]) }
37673 Sep 22 23:22:16.152 INFO Sent repair work, now wait for resp
37674 Sep 22 23:22:16.152 INFO [0] received reconcile message
37675 Sep 22 23:22:16.152 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }, state: ClientData([InProgress, New, New]) }, : downstairs
37676 Sep 22 23:22:16.152 INFO [0] client ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }
37677 Sep 22 23:22:16.152 INFO [1] received reconcile message
37678 Sep 22 23:22:16.152 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37679 Sep 22 23:22:16.152 INFO [1] client ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }
37680 Sep 22 23:22:16.152 INFO [2] received reconcile message
37681 Sep 22 23:22:16.152 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37682 Sep 22 23:22:16.152 INFO [2] client ExtentReopen { repair_id: ReconciliationId(103), extent_id: 176 }
37683 Sep 22 23:22:16.152 DEBG 103 Reopen extent 176
37684 Sep 22 23:22:16.152 DEBG 103 Reopen extent 176
37685 Sep 22 23:22:16.153 DEBG 103 Reopen extent 176
37686 Sep 22 23:22:16.154 DEBG [2] It's time to notify for 103
37687 Sep 22 23:22:16.154 INFO Completion from [2] id:103 status:true
37688 Sep 22 23:22:16.154 INFO [104/752] Repair commands completed
37689 Sep 22 23:22:16.154 INFO Pop front: ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37690 Sep 22 23:22:16.154 INFO Sent repair work, now wait for resp
37691 Sep 22 23:22:16.154 INFO [0] received reconcile message
37692 Sep 22 23:22:16.154 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37693 Sep 22 23:22:16.154 INFO [0] client ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37694 Sep 22 23:22:16.154 INFO [1] received reconcile message
37695 Sep 22 23:22:16.154 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37696 Sep 22 23:22:16.154 INFO [1] client ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37697 Sep 22 23:22:16.154 INFO [2] received reconcile message
37698 Sep 22 23:22:16.154 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37699 Sep 22 23:22:16.154 INFO [2] client ExtentFlush { repair_id: ReconciliationId(104), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37700 Sep 22 23:22:16.154 DEBG 104 Flush extent 38 with f:2 g:2
37701 Sep 22 23:22:16.154 DEBG Flush just extent 38 with f:2 and g:2
37702 Sep 22 23:22:16.154 DEBG [1] It's time to notify for 104
37703 Sep 22 23:22:16.154 INFO Completion from [1] id:104 status:true
37704 Sep 22 23:22:16.154 INFO [105/752] Repair commands completed
37705 Sep 22 23:22:16.154 INFO Pop front: ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }, state: ClientData([New, New, New]) }
37706 Sep 22 23:22:16.154 INFO Sent repair work, now wait for resp
37707 Sep 22 23:22:16.154 INFO [0] received reconcile message
37708 Sep 22 23:22:16.154 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }, state: ClientData([InProgress, New, New]) }, : downstairs
37709 Sep 22 23:22:16.154 INFO [0] client ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }
37710 Sep 22 23:22:16.154 INFO [1] received reconcile message
37711 Sep 22 23:22:16.154 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37712 Sep 22 23:22:16.154 INFO [1] client ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }
37713 Sep 22 23:22:16.154 INFO [2] received reconcile message
37714 Sep 22 23:22:16.154 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37715 Sep 22 23:22:16.154 INFO [2] client ExtentClose { repair_id: ReconciliationId(105), extent_id: 38 }
37716 Sep 22 23:22:16.155 DEBG 105 Close extent 38
37717 Sep 22 23:22:16.155 DEBG 105 Close extent 38
37718 Sep 22 23:22:16.155 DEBG 105 Close extent 38
37719 Sep 22 23:22:16.156 DEBG [2] It's time to notify for 105
37720 Sep 22 23:22:16.156 INFO Completion from [2] id:105 status:true
37721 Sep 22 23:22:16.156 INFO [106/752] Repair commands completed
37722 Sep 22 23:22:16.156 INFO Pop front: ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37723 Sep 22 23:22:16.156 INFO Sent repair work, now wait for resp
37724 Sep 22 23:22:16.156 INFO [0] received reconcile message
37725 Sep 22 23:22:16.156 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37726 Sep 22 23:22:16.156 INFO [0] client ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37727 Sep 22 23:22:16.156 INFO [0] Sending repair request ReconciliationId(106)
37728 Sep 22 23:22:16.156 INFO [1] received reconcile message
37729 Sep 22 23:22:16.156 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37730 Sep 22 23:22:16.156 INFO [1] client ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37731 Sep 22 23:22:16.156 INFO [1] No action required ReconciliationId(106)
37732 Sep 22 23:22:16.156 INFO [2] received reconcile message
37733 Sep 22 23:22:16.156 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37734 Sep 22 23:22:16.156 INFO [2] client ExtentRepair { repair_id: ReconciliationId(106), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37735 Sep 22 23:22:16.156 INFO [2] No action required ReconciliationId(106)
37736 Sep 22 23:22:16.156 DEBG 106 Repair extent 38 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37737 Sep 22 23:22:16.156 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/026.copy"
37738 Sep 22 23:22:16.221 INFO accepted connection, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37739 Sep 22 23:22:16.221 TRCE incoming request, uri: /extent/38/files, method: GET, req_id: 59e76f2f-61af-4568-8c2e-9fa6edfe72fc, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37740 Sep 22 23:22:16.222 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/38/files, method: GET, req_id: 59e76f2f-61af-4568-8c2e-9fa6edfe72fc, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37741 Sep 22 23:22:16.222 INFO eid:38 Found repair files: ["026", "026.db"]
37742 Sep 22 23:22:16.222 TRCE incoming request, uri: /newextent/38/data, method: GET, req_id: a8b2aab0-e0e8-414f-b5b1-87dec613115c, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37743 Sep 22 23:22:16.223 INFO request completed, latency_us: 320, response_code: 200, uri: /newextent/38/data, method: GET, req_id: a8b2aab0-e0e8-414f-b5b1-87dec613115c, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37744 Sep 22 23:22:16.227 TRCE incoming request, uri: /newextent/38/db, method: GET, req_id: 0ff20c32-2cb7-4e48-9222-59492c0130ac, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37745 Sep 22 23:22:16.228 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/38/db, method: GET, req_id: 0ff20c32-2cb7-4e48-9222-59492c0130ac, remote_addr: 127.0.0.1:61720, local_addr: 127.0.0.1:52864, task: repair
37746 Sep 22 23:22:16.229 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/026.copy" to "/tmp/downstairs-zrMnlo6G/00/000/026.replace"
37747 Sep 22 23:22:16.229 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37748 Sep 22 23:22:16.229 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/026.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37749 Sep 22 23:22:16.230 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/026"
37750 Sep 22 23:22:16.230 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/026.db"
37751 Sep 22 23:22:16.230 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37752 Sep 22 23:22:16.230 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/026.replace" to "/tmp/downstairs-zrMnlo6G/00/000/026.completed"
37753 Sep 22 23:22:16.230 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37754 Sep 22 23:22:16.230 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37755 Sep 22 23:22:16.230 DEBG [0] It's time to notify for 106
37756 Sep 22 23:22:16.230 INFO Completion from [0] id:106 status:true
37757 Sep 22 23:22:16.230 INFO [107/752] Repair commands completed
37758 Sep 22 23:22:16.230 INFO Pop front: ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }, state: ClientData([New, New, New]) }
37759 Sep 22 23:22:16.230 INFO Sent repair work, now wait for resp
37760 Sep 22 23:22:16.230 INFO [0] received reconcile message
37761 Sep 22 23:22:16.230 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }, state: ClientData([InProgress, New, New]) }, : downstairs
37762 Sep 22 23:22:16.230 INFO [0] client ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }
37763 Sep 22 23:22:16.230 INFO [1] received reconcile message
37764 Sep 22 23:22:16.230 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37765 Sep 22 23:22:16.231 INFO [1] client ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }
37766 Sep 22 23:22:16.231 INFO [2] received reconcile message
37767 Sep 22 23:22:16.231 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37768 Sep 22 23:22:16.231 INFO [2] client ExtentReopen { repair_id: ReconciliationId(107), extent_id: 38 }
37769 Sep 22 23:22:16.231 DEBG 107 Reopen extent 38
37770 Sep 22 23:22:16.231 DEBG 107 Reopen extent 38
37771 Sep 22 23:22:16.232 DEBG 107 Reopen extent 38
37772 Sep 22 23:22:16.232 DEBG [2] It's time to notify for 107
37773 Sep 22 23:22:16.232 INFO Completion from [2] id:107 status:true
37774 Sep 22 23:22:16.232 INFO [108/752] Repair commands completed
37775 Sep 22 23:22:16.232 INFO Pop front: ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37776 Sep 22 23:22:16.233 INFO Sent repair work, now wait for resp
37777 Sep 22 23:22:16.233 INFO [0] received reconcile message
37778 Sep 22 23:22:16.233 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37779 Sep 22 23:22:16.233 INFO [0] client ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37780 Sep 22 23:22:16.233 INFO [1] received reconcile message
37781 Sep 22 23:22:16.233 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37782 Sep 22 23:22:16.233 INFO [1] client ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37783 Sep 22 23:22:16.233 INFO [2] received reconcile message
37784 Sep 22 23:22:16.233 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37785 Sep 22 23:22:16.233 INFO [2] client ExtentFlush { repair_id: ReconciliationId(108), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37786 Sep 22 23:22:16.233 DEBG 108 Flush extent 21 with f:2 g:2
37787 Sep 22 23:22:16.233 DEBG Flush just extent 21 with f:2 and g:2
37788 Sep 22 23:22:16.233 DEBG [1] It's time to notify for 108
37789 Sep 22 23:22:16.233 INFO Completion from [1] id:108 status:true
37790 Sep 22 23:22:16.233 INFO [109/752] Repair commands completed
37791 Sep 22 23:22:16.233 INFO Pop front: ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }, state: ClientData([New, New, New]) }
37792 Sep 22 23:22:16.233 INFO Sent repair work, now wait for resp
37793 Sep 22 23:22:16.233 INFO [0] received reconcile message
37794 Sep 22 23:22:16.233 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }, state: ClientData([InProgress, New, New]) }, : downstairs
37795 Sep 22 23:22:16.233 INFO [0] client ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }
37796 Sep 22 23:22:16.233 INFO [1] received reconcile message
37797 Sep 22 23:22:16.233 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37798 Sep 22 23:22:16.233 INFO [1] client ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }
37799 Sep 22 23:22:16.233 INFO [2] received reconcile message
37800 Sep 22 23:22:16.233 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37801 Sep 22 23:22:16.233 INFO [2] client ExtentClose { repair_id: ReconciliationId(109), extent_id: 21 }
37802 Sep 22 23:22:16.233 DEBG 109 Close extent 21
37803 Sep 22 23:22:16.234 DEBG 109 Close extent 21
37804 Sep 22 23:22:16.234 DEBG 109 Close extent 21
37805 Sep 22 23:22:16.234 DEBG [2] It's time to notify for 109
37806 Sep 22 23:22:16.234 INFO Completion from [2] id:109 status:true
37807 Sep 22 23:22:16.235 INFO [110/752] Repair commands completed
37808 Sep 22 23:22:16.235 INFO Pop front: ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37809 Sep 22 23:22:16.235 INFO Sent repair work, now wait for resp
37810 Sep 22 23:22:16.235 INFO [0] received reconcile message
37811 Sep 22 23:22:16.235 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37812 Sep 22 23:22:16.235 INFO [0] client ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37813 Sep 22 23:22:16.235 INFO [0] Sending repair request ReconciliationId(110)
37814 Sep 22 23:22:16.235 INFO [1] received reconcile message
37815 Sep 22 23:22:16.235 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37816 Sep 22 23:22:16.235 INFO [1] client ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37817 Sep 22 23:22:16.235 INFO [1] No action required ReconciliationId(110)
37818 Sep 22 23:22:16.235 INFO [2] received reconcile message
37819 Sep 22 23:22:16.235 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37820 Sep 22 23:22:16.235 INFO [2] client ExtentRepair { repair_id: ReconciliationId(110), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37821 Sep 22 23:22:16.235 INFO [2] No action required ReconciliationId(110)
37822 Sep 22 23:22:16.235 DEBG 110 Repair extent 21 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37823 Sep 22 23:22:16.235 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/015.copy"
37824 Sep 22 23:22:16.299 INFO accepted connection, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37825 Sep 22 23:22:16.299 TRCE incoming request, uri: /extent/21/files, method: GET, req_id: a1161617-249c-4a7b-8faf-8804597a63f8, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37826 Sep 22 23:22:16.299 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/21/files, method: GET, req_id: a1161617-249c-4a7b-8faf-8804597a63f8, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37827 Sep 22 23:22:16.300 INFO eid:21 Found repair files: ["015", "015.db"]
37828 Sep 22 23:22:16.300 TRCE incoming request, uri: /newextent/21/data, method: GET, req_id: f4d1ccfd-95b9-4fe0-a6e0-9e892b2281bc, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37829 Sep 22 23:22:16.300 INFO request completed, latency_us: 260, response_code: 200, uri: /newextent/21/data, method: GET, req_id: f4d1ccfd-95b9-4fe0-a6e0-9e892b2281bc, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37830 Sep 22 23:22:16.305 TRCE incoming request, uri: /newextent/21/db, method: GET, req_id: 5b950338-8fe7-48c8-b12f-f2ed9dd34a82, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37831 Sep 22 23:22:16.305 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/21/db, method: GET, req_id: 5b950338-8fe7-48c8-b12f-f2ed9dd34a82, remote_addr: 127.0.0.1:56752, local_addr: 127.0.0.1:52864, task: repair
37832 Sep 22 23:22:16.306 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/015.copy" to "/tmp/downstairs-zrMnlo6G/00/000/015.replace"
37833 Sep 22 23:22:16.306 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37834 Sep 22 23:22:16.307 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/015.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37835 Sep 22 23:22:16.307 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/015"
37836 Sep 22 23:22:16.307 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/015.db"
37837 Sep 22 23:22:16.307 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37838 Sep 22 23:22:16.307 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/015.replace" to "/tmp/downstairs-zrMnlo6G/00/000/015.completed"
37839 Sep 22 23:22:16.308 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37840 Sep 22 23:22:16.308 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37841 Sep 22 23:22:16.308 DEBG [0] It's time to notify for 110
37842 Sep 22 23:22:16.308 INFO Completion from [0] id:110 status:true
37843 Sep 22 23:22:16.308 INFO [111/752] Repair commands completed
37844 Sep 22 23:22:16.308 INFO Pop front: ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }, state: ClientData([New, New, New]) }
37845 Sep 22 23:22:16.308 INFO Sent repair work, now wait for resp
37846 Sep 22 23:22:16.308 INFO [0] received reconcile message
37847 Sep 22 23:22:16.308 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }, state: ClientData([InProgress, New, New]) }, : downstairs
37848 Sep 22 23:22:16.308 INFO [0] client ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }
37849 Sep 22 23:22:16.308 INFO [1] received reconcile message
37850 Sep 22 23:22:16.308 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37851 Sep 22 23:22:16.308 INFO [1] client ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }
37852 Sep 22 23:22:16.308 INFO [2] received reconcile message
37853 Sep 22 23:22:16.308 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37854 Sep 22 23:22:16.308 INFO [2] client ExtentReopen { repair_id: ReconciliationId(111), extent_id: 21 }
37855 Sep 22 23:22:16.308 DEBG 111 Reopen extent 21
37856 Sep 22 23:22:16.309 DEBG 111 Reopen extent 21
37857 Sep 22 23:22:16.309 DEBG 111 Reopen extent 21
37858 Sep 22 23:22:16.310 DEBG [2] It's time to notify for 111
37859 Sep 22 23:22:16.310 INFO Completion from [2] id:111 status:true
37860 Sep 22 23:22:16.310 INFO [112/752] Repair commands completed
37861 Sep 22 23:22:16.310 INFO Pop front: ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37862 Sep 22 23:22:16.310 INFO Sent repair work, now wait for resp
37863 Sep 22 23:22:16.310 INFO [0] received reconcile message
37864 Sep 22 23:22:16.310 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37865 Sep 22 23:22:16.310 INFO [0] client ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37866 Sep 22 23:22:16.310 INFO [1] received reconcile message
37867 Sep 22 23:22:16.310 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37868 Sep 22 23:22:16.310 INFO [1] client ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37869 Sep 22 23:22:16.310 INFO [2] received reconcile message
37870 Sep 22 23:22:16.310 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37871 Sep 22 23:22:16.310 INFO [2] client ExtentFlush { repair_id: ReconciliationId(112), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37872 Sep 22 23:22:16.310 DEBG 112 Flush extent 97 with f:2 g:2
37873 Sep 22 23:22:16.310 DEBG Flush just extent 97 with f:2 and g:2
37874 Sep 22 23:22:16.311 DEBG [1] It's time to notify for 112
37875 Sep 22 23:22:16.311 INFO Completion from [1] id:112 status:true
37876 Sep 22 23:22:16.311 INFO [113/752] Repair commands completed
37877 Sep 22 23:22:16.311 INFO Pop front: ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }, state: ClientData([New, New, New]) }
37878 Sep 22 23:22:16.311 INFO Sent repair work, now wait for resp
37879 Sep 22 23:22:16.311 INFO [0] received reconcile message
37880 Sep 22 23:22:16.311 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }, state: ClientData([InProgress, New, New]) }, : downstairs
37881 Sep 22 23:22:16.311 INFO [0] client ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }
37882 Sep 22 23:22:16.311 INFO [1] received reconcile message
37883 Sep 22 23:22:16.311 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37884 Sep 22 23:22:16.311 INFO [1] client ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }
37885 Sep 22 23:22:16.311 INFO [2] received reconcile message
37886 Sep 22 23:22:16.311 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37887 Sep 22 23:22:16.311 INFO [2] client ExtentClose { repair_id: ReconciliationId(113), extent_id: 97 }
37888 Sep 22 23:22:16.311 DEBG 113 Close extent 97
37889 Sep 22 23:22:16.311 DEBG 113 Close extent 97
37890 Sep 22 23:22:16.312 DEBG 113 Close extent 97
37891 Sep 22 23:22:16.312 DEBG [2] It's time to notify for 113
37892 Sep 22 23:22:16.312 INFO Completion from [2] id:113 status:true
37893 Sep 22 23:22:16.312 INFO [114/752] Repair commands completed
37894 Sep 22 23:22:16.312 INFO Pop front: ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37895 Sep 22 23:22:16.312 INFO Sent repair work, now wait for resp
37896 Sep 22 23:22:16.312 INFO [0] received reconcile message
37897 Sep 22 23:22:16.312 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37898 Sep 22 23:22:16.312 INFO [0] client ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37899 Sep 22 23:22:16.312 INFO [0] Sending repair request ReconciliationId(114)
37900 Sep 22 23:22:16.312 INFO [1] received reconcile message
37901 Sep 22 23:22:16.312 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37902 Sep 22 23:22:16.312 INFO [1] client ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37903 Sep 22 23:22:16.312 INFO [1] No action required ReconciliationId(114)
37904 Sep 22 23:22:16.312 INFO [2] received reconcile message
37905 Sep 22 23:22:16.312 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37906 Sep 22 23:22:16.312 INFO [2] client ExtentRepair { repair_id: ReconciliationId(114), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37907 Sep 22 23:22:16.312 INFO [2] No action required ReconciliationId(114)
37908 Sep 22 23:22:16.313 DEBG 114 Repair extent 97 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37909 Sep 22 23:22:16.313 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/061.copy"
37910 Sep 22 23:22:16.376 INFO accepted connection, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37911 Sep 22 23:22:16.377 TRCE incoming request, uri: /extent/97/files, method: GET, req_id: 986d2be7-4305-4631-b72d-4b22088e6250, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37912 Sep 22 23:22:16.377 INFO request completed, latency_us: 268, response_code: 200, uri: /extent/97/files, method: GET, req_id: 986d2be7-4305-4631-b72d-4b22088e6250, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37913 Sep 22 23:22:16.377 INFO eid:97 Found repair files: ["061", "061.db"]
37914 Sep 22 23:22:16.377 TRCE incoming request, uri: /newextent/97/data, method: GET, req_id: 44d2cbc7-8fa4-41b1-b3aa-78a59b7e8ef3, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37915 Sep 22 23:22:16.378 INFO request completed, latency_us: 351, response_code: 200, uri: /newextent/97/data, method: GET, req_id: 44d2cbc7-8fa4-41b1-b3aa-78a59b7e8ef3, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37916 Sep 22 23:22:16.383 TRCE incoming request, uri: /newextent/97/db, method: GET, req_id: 15d9e0f7-fa9d-4c1b-8bdc-a560f9b6cffe, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37917 Sep 22 23:22:16.383 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/97/db, method: GET, req_id: 15d9e0f7-fa9d-4c1b-8bdc-a560f9b6cffe, remote_addr: 127.0.0.1:40253, local_addr: 127.0.0.1:52864, task: repair
37918 Sep 22 23:22:16.384 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/061.copy" to "/tmp/downstairs-zrMnlo6G/00/000/061.replace"
37919 Sep 22 23:22:16.384 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37920 Sep 22 23:22:16.385 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/061.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
37921 Sep 22 23:22:16.386 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/061"
37922 Sep 22 23:22:16.386 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/061.db"
37923 Sep 22 23:22:16.386 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37924 Sep 22 23:22:16.386 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/061.replace" to "/tmp/downstairs-zrMnlo6G/00/000/061.completed"
37925 Sep 22 23:22:16.386 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37926 Sep 22 23:22:16.386 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
37927 Sep 22 23:22:16.386 DEBG [0] It's time to notify for 114
37928 Sep 22 23:22:16.386 INFO Completion from [0] id:114 status:true
37929 Sep 22 23:22:16.386 INFO [115/752] Repair commands completed
37930 Sep 22 23:22:16.386 INFO Pop front: ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }, state: ClientData([New, New, New]) }
37931 Sep 22 23:22:16.386 INFO Sent repair work, now wait for resp
37932 Sep 22 23:22:16.386 INFO [0] received reconcile message
37933 Sep 22 23:22:16.386 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }, state: ClientData([InProgress, New, New]) }, : downstairs
37934 Sep 22 23:22:16.386 INFO [0] client ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }
37935 Sep 22 23:22:16.386 INFO [1] received reconcile message
37936 Sep 22 23:22:16.386 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37937 Sep 22 23:22:16.386 INFO [1] client ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }
37938 Sep 22 23:22:16.386 INFO [2] received reconcile message
37939 Sep 22 23:22:16.387 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37940 Sep 22 23:22:16.387 INFO [2] client ExtentReopen { repair_id: ReconciliationId(115), extent_id: 97 }
37941 Sep 22 23:22:16.387 DEBG 115 Reopen extent 97
37942 Sep 22 23:22:16.388 DEBG 115 Reopen extent 97
37943 Sep 22 23:22:16.388 DEBG 115 Reopen extent 97
37944 Sep 22 23:22:16.389 DEBG [2] It's time to notify for 115
37945 Sep 22 23:22:16.389 INFO Completion from [2] id:115 status:true
37946 Sep 22 23:22:16.389 INFO [116/752] Repair commands completed
37947 Sep 22 23:22:16.389 INFO Pop front: ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37948 Sep 22 23:22:16.389 INFO Sent repair work, now wait for resp
37949 Sep 22 23:22:16.389 INFO [0] received reconcile message
37950 Sep 22 23:22:16.389 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37951 Sep 22 23:22:16.389 INFO [0] client ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37952 Sep 22 23:22:16.389 INFO [1] received reconcile message
37953 Sep 22 23:22:16.389 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37954 Sep 22 23:22:16.389 INFO [1] client ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37955 Sep 22 23:22:16.389 INFO [2] received reconcile message
37956 Sep 22 23:22:16.389 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37957 Sep 22 23:22:16.389 INFO [2] client ExtentFlush { repair_id: ReconciliationId(116), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37958 Sep 22 23:22:16.389 DEBG 116 Flush extent 124 with f:2 g:2
37959 Sep 22 23:22:16.389 DEBG Flush just extent 124 with f:2 and g:2
37960 Sep 22 23:22:16.389 DEBG [1] It's time to notify for 116
37961 Sep 22 23:22:16.389 INFO Completion from [1] id:116 status:true
37962 Sep 22 23:22:16.389 INFO [117/752] Repair commands completed
37963 Sep 22 23:22:16.389 INFO Pop front: ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }, state: ClientData([New, New, New]) }
37964 Sep 22 23:22:16.389 INFO Sent repair work, now wait for resp
37965 Sep 22 23:22:16.389 INFO [0] received reconcile message
37966 Sep 22 23:22:16.389 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }, state: ClientData([InProgress, New, New]) }, : downstairs
37967 Sep 22 23:22:16.389 INFO [0] client ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }
37968 Sep 22 23:22:16.389 INFO [1] received reconcile message
37969 Sep 22 23:22:16.389 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37970 Sep 22 23:22:16.389 INFO [1] client ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }
37971 Sep 22 23:22:16.390 INFO [2] received reconcile message
37972 Sep 22 23:22:16.390 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37973 Sep 22 23:22:16.390 INFO [2] client ExtentClose { repair_id: ReconciliationId(117), extent_id: 124 }
37974 Sep 22 23:22:16.390 DEBG 117 Close extent 124
37975 Sep 22 23:22:16.390 DEBG 117 Close extent 124
37976 Sep 22 23:22:16.390 DEBG 117 Close extent 124
37977 Sep 22 23:22:16.391 DEBG [2] It's time to notify for 117
37978 Sep 22 23:22:16.391 INFO Completion from [2] id:117 status:true
37979 Sep 22 23:22:16.391 INFO [118/752] Repair commands completed
37980 Sep 22 23:22:16.391 INFO Pop front: ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37981 Sep 22 23:22:16.391 INFO Sent repair work, now wait for resp
37982 Sep 22 23:22:16.391 INFO [0] received reconcile message
37983 Sep 22 23:22:16.391 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37984 Sep 22 23:22:16.391 INFO [0] client ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37985 Sep 22 23:22:16.391 INFO [0] Sending repair request ReconciliationId(118)
37986 Sep 22 23:22:16.391 INFO [1] received reconcile message
37987 Sep 22 23:22:16.391 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37988 Sep 22 23:22:16.391 INFO [1] client ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37989 Sep 22 23:22:16.391 INFO [1] No action required ReconciliationId(118)
37990 Sep 22 23:22:16.391 INFO [2] received reconcile message
37991 Sep 22 23:22:16.391 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37992 Sep 22 23:22:16.391 INFO [2] client ExtentRepair { repair_id: ReconciliationId(118), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
37993 Sep 22 23:22:16.391 INFO [2] No action required ReconciliationId(118)
37994 Sep 22 23:22:16.391 DEBG 118 Repair extent 124 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
37995 Sep 22 23:22:16.391 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/07C.copy"
37996 Sep 22 23:22:16.451 DEBG [1] Read AckReady 1075, : downstairs
37997 Sep 22 23:22:16.453 DEBG up_ds_listen was notified
37998 Sep 22 23:22:16.453 DEBG up_ds_listen process 1075
37999 Sep 22 23:22:16.453 DEBG [A] ack job 1075:76, : downstairs
38000 Sep 22 23:22:16.456 INFO accepted connection, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38001 Sep 22 23:22:16.456 TRCE incoming request, uri: /extent/124/files, method: GET, req_id: 04e2678e-932f-4872-a6de-cf8b8477c475, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38002 Sep 22 23:22:16.457 INFO request completed, latency_us: 271, response_code: 200, uri: /extent/124/files, method: GET, req_id: 04e2678e-932f-4872-a6de-cf8b8477c475, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38003 Sep 22 23:22:16.457 INFO eid:124 Found repair files: ["07C", "07C.db"]
38004 Sep 22 23:22:16.457 TRCE incoming request, uri: /newextent/124/data, method: GET, req_id: a9463948-6e86-45c7-a571-55d90fa0b933, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38005 Sep 22 23:22:16.458 INFO request completed, latency_us: 367, response_code: 200, uri: /newextent/124/data, method: GET, req_id: a9463948-6e86-45c7-a571-55d90fa0b933, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38006 Sep 22 23:22:16.462 TRCE incoming request, uri: /newextent/124/db, method: GET, req_id: bf2d929e-d96f-45ec-8c18-bd4a2bfa93a3, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38007 Sep 22 23:22:16.463 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/124/db, method: GET, req_id: bf2d929e-d96f-45ec-8c18-bd4a2bfa93a3, remote_addr: 127.0.0.1:43562, local_addr: 127.0.0.1:52864, task: repair
38008 Sep 22 23:22:16.464 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/07C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/07C.replace"
38009 Sep 22 23:22:16.464 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38010 Sep 22 23:22:16.465 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/07C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38011 Sep 22 23:22:16.465 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07C"
38012 Sep 22 23:22:16.465 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07C.db"
38013 Sep 22 23:22:16.465 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38014 Sep 22 23:22:16.465 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/07C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/07C.completed"
38015 Sep 22 23:22:16.465 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38016 Sep 22 23:22:16.466 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38017 Sep 22 23:22:16.466 DEBG [0] It's time to notify for 118
38018 Sep 22 23:22:16.466 INFO Completion from [0] id:118 status:true
38019 Sep 22 23:22:16.466 INFO [119/752] Repair commands completed
38020 Sep 22 23:22:16.466 INFO Pop front: ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }, state: ClientData([New, New, New]) }
38021 Sep 22 23:22:16.466 INFO Sent repair work, now wait for resp
38022 Sep 22 23:22:16.466 INFO [0] received reconcile message
38023 Sep 22 23:22:16.466 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }, state: ClientData([InProgress, New, New]) }, : downstairs
38024 Sep 22 23:22:16.466 INFO [0] client ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }
38025 Sep 22 23:22:16.466 INFO [1] received reconcile message
38026 Sep 22 23:22:16.466 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38027 Sep 22 23:22:16.466 INFO [1] client ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }
38028 Sep 22 23:22:16.466 INFO [2] received reconcile message
38029 Sep 22 23:22:16.466 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38030 Sep 22 23:22:16.466 INFO [2] client ExtentReopen { repair_id: ReconciliationId(119), extent_id: 124 }
38031 Sep 22 23:22:16.466 DEBG 119 Reopen extent 124
38032 Sep 22 23:22:16.467 DEBG 119 Reopen extent 124
38033 Sep 22 23:22:16.468 DEBG 119 Reopen extent 124
38034 Sep 22 23:22:16.468 DEBG [2] It's time to notify for 119
38035 Sep 22 23:22:16.468 INFO Completion from [2] id:119 status:true
38036 Sep 22 23:22:16.468 INFO [120/752] Repair commands completed
38037 Sep 22 23:22:16.468 INFO Pop front: ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38038 Sep 22 23:22:16.468 INFO Sent repair work, now wait for resp
38039 Sep 22 23:22:16.468 INFO [0] received reconcile message
38040 Sep 22 23:22:16.468 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38041 Sep 22 23:22:16.469 INFO [0] client ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38042 Sep 22 23:22:16.469 INFO [1] received reconcile message
38043 Sep 22 23:22:16.469 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38044 Sep 22 23:22:16.469 INFO [1] client ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38045 Sep 22 23:22:16.469 INFO [2] received reconcile message
38046 Sep 22 23:22:16.469 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38047 Sep 22 23:22:16.469 INFO [2] client ExtentFlush { repair_id: ReconciliationId(120), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38048 Sep 22 23:22:16.469 DEBG 120 Flush extent 47 with f:2 g:2
38049 Sep 22 23:22:16.469 DEBG Flush just extent 47 with f:2 and g:2
38050 Sep 22 23:22:16.469 DEBG [1] It's time to notify for 120
38051 Sep 22 23:22:16.469 INFO Completion from [1] id:120 status:true
38052 Sep 22 23:22:16.469 INFO [121/752] Repair commands completed
38053 Sep 22 23:22:16.469 INFO Pop front: ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }, state: ClientData([New, New, New]) }
38054 Sep 22 23:22:16.469 INFO Sent repair work, now wait for resp
38055 Sep 22 23:22:16.469 INFO [0] received reconcile message
38056 Sep 22 23:22:16.469 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }, state: ClientData([InProgress, New, New]) }, : downstairs
38057 Sep 22 23:22:16.469 INFO [0] client ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }
38058 Sep 22 23:22:16.469 INFO [1] received reconcile message
38059 Sep 22 23:22:16.469 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38060 Sep 22 23:22:16.469 INFO [1] client ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }
38061 Sep 22 23:22:16.469 INFO [2] received reconcile message
38062 Sep 22 23:22:16.469 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38063 Sep 22 23:22:16.469 INFO [2] client ExtentClose { repair_id: ReconciliationId(121), extent_id: 47 }
38064 Sep 22 23:22:16.469 DEBG 121 Close extent 47
38065 Sep 22 23:22:16.470 DEBG 121 Close extent 47
38066 Sep 22 23:22:16.470 DEBG 121 Close extent 47
38067 Sep 22 23:22:16.470 DEBG [2] It's time to notify for 121
38068 Sep 22 23:22:16.470 INFO Completion from [2] id:121 status:true
38069 Sep 22 23:22:16.470 INFO [122/752] Repair commands completed
38070 Sep 22 23:22:16.470 INFO Pop front: ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38071 Sep 22 23:22:16.470 INFO Sent repair work, now wait for resp
38072 Sep 22 23:22:16.471 INFO [0] received reconcile message
38073 Sep 22 23:22:16.471 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38074 Sep 22 23:22:16.471 INFO [0] client ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38075 Sep 22 23:22:16.471 INFO [0] Sending repair request ReconciliationId(122)
38076 Sep 22 23:22:16.471 INFO [1] received reconcile message
38077 Sep 22 23:22:16.471 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38078 Sep 22 23:22:16.471 INFO [1] client ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38079 Sep 22 23:22:16.471 INFO [1] No action required ReconciliationId(122)
38080 Sep 22 23:22:16.471 INFO [2] received reconcile message
38081 Sep 22 23:22:16.471 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38082 Sep 22 23:22:16.471 INFO [2] client ExtentRepair { repair_id: ReconciliationId(122), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38083 Sep 22 23:22:16.471 INFO [2] No action required ReconciliationId(122)
38084 Sep 22 23:22:16.471 DEBG 122 Repair extent 47 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38085 Sep 22 23:22:16.471 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/02F.copy"
38086 Sep 22 23:22:16.507 DEBG up_ds_listen checked 1 jobs, back to waiting
38087 Sep 22 23:22:16.509 DEBG Flush :1076 extent_limit None deps:[JobId(1075), JobId(1074)] res:true f:27 g:1
38088 Sep 22 23:22:16.509 INFO [lossy] sleeping 1 second
38089 Sep 22 23:22:16.509 DEBG Flush :1074 extent_limit None deps:[JobId(1073), JobId(1072)] res:true f:26 g:1
38090 Sep 22 23:22:16.515 DEBG Read :1075 deps:[JobId(1074)] res:true
38091 Sep 22 23:22:16.533 INFO accepted connection, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38092 Sep 22 23:22:16.534 TRCE incoming request, uri: /extent/47/files, method: GET, req_id: f2415882-dfa7-4536-81f9-4dfd258cef76, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38093 Sep 22 23:22:16.534 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/47/files, method: GET, req_id: f2415882-dfa7-4536-81f9-4dfd258cef76, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38094 Sep 22 23:22:16.534 INFO eid:47 Found repair files: ["02F", "02F.db"]
38095 Sep 22 23:22:16.534 TRCE incoming request, uri: /newextent/47/data, method: GET, req_id: 8bd15551-312d-49de-820c-20975f6aa05c, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38096 Sep 22 23:22:16.535 INFO request completed, latency_us: 314, response_code: 200, uri: /newextent/47/data, method: GET, req_id: 8bd15551-312d-49de-820c-20975f6aa05c, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38097 Sep 22 23:22:16.539 TRCE incoming request, uri: /newextent/47/db, method: GET, req_id: f6cd896d-8afa-4468-bf95-d66a7dab6d40, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38098 Sep 22 23:22:16.540 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/47/db, method: GET, req_id: f6cd896d-8afa-4468-bf95-d66a7dab6d40, remote_addr: 127.0.0.1:62204, local_addr: 127.0.0.1:52864, task: repair
38099 Sep 22 23:22:16.541 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/02F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/02F.replace"
38100 Sep 22 23:22:16.541 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38101 Sep 22 23:22:16.542 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/02F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38102 Sep 22 23:22:16.542 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02F"
38103 Sep 22 23:22:16.542 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02F.db"
38104 Sep 22 23:22:16.542 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38105 Sep 22 23:22:16.542 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/02F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/02F.completed"
38106 Sep 22 23:22:16.542 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38107 Sep 22 23:22:16.542 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38108 Sep 22 23:22:16.542 DEBG [0] It's time to notify for 122
38109 Sep 22 23:22:16.542 INFO Completion from [0] id:122 status:true
38110 Sep 22 23:22:16.542 INFO [123/752] Repair commands completed
38111 Sep 22 23:22:16.542 INFO Pop front: ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }, state: ClientData([New, New, New]) }
38112 Sep 22 23:22:16.542 INFO Sent repair work, now wait for resp
38113 Sep 22 23:22:16.543 INFO [0] received reconcile message
38114 Sep 22 23:22:16.543 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }, state: ClientData([InProgress, New, New]) }, : downstairs
38115 Sep 22 23:22:16.543 INFO [0] client ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }
38116 Sep 22 23:22:16.543 INFO [1] received reconcile message
38117 Sep 22 23:22:16.543 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38118 Sep 22 23:22:16.543 INFO [1] client ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }
38119 Sep 22 23:22:16.543 INFO [2] received reconcile message
38120 Sep 22 23:22:16.543 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38121 Sep 22 23:22:16.543 INFO [2] client ExtentReopen { repair_id: ReconciliationId(123), extent_id: 47 }
38122 Sep 22 23:22:16.543 DEBG 123 Reopen extent 47
38123 Sep 22 23:22:16.544 DEBG 123 Reopen extent 47
38124 Sep 22 23:22:16.544 DEBG 123 Reopen extent 47
38125 Sep 22 23:22:16.545 DEBG [2] It's time to notify for 123
38126 Sep 22 23:22:16.545 INFO Completion from [2] id:123 status:true
38127 Sep 22 23:22:16.545 INFO [124/752] Repair commands completed
38128 Sep 22 23:22:16.545 INFO Pop front: ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38129 Sep 22 23:22:16.545 INFO Sent repair work, now wait for resp
38130 Sep 22 23:22:16.545 INFO [0] received reconcile message
38131 Sep 22 23:22:16.545 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38132 Sep 22 23:22:16.545 INFO [0] client ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38133 Sep 22 23:22:16.545 INFO [1] received reconcile message
38134 Sep 22 23:22:16.545 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38135 Sep 22 23:22:16.545 INFO [1] client ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38136 Sep 22 23:22:16.545 INFO [2] received reconcile message
38137 Sep 22 23:22:16.545 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38138 Sep 22 23:22:16.545 INFO [2] client ExtentFlush { repair_id: ReconciliationId(124), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38139 Sep 22 23:22:16.545 DEBG 124 Flush extent 179 with f:2 g:2
38140 Sep 22 23:22:16.545 DEBG Flush just extent 179 with f:2 and g:2
38141 Sep 22 23:22:16.545 DEBG [1] It's time to notify for 124
38142 Sep 22 23:22:16.545 INFO Completion from [1] id:124 status:true
38143 Sep 22 23:22:16.545 INFO [125/752] Repair commands completed
38144 Sep 22 23:22:16.545 INFO Pop front: ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }, state: ClientData([New, New, New]) }
38145 Sep 22 23:22:16.545 INFO Sent repair work, now wait for resp
38146 Sep 22 23:22:16.545 INFO [0] received reconcile message
38147 Sep 22 23:22:16.545 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }, state: ClientData([InProgress, New, New]) }, : downstairs
38148 Sep 22 23:22:16.545 INFO [0] client ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }
38149 Sep 22 23:22:16.545 INFO [1] received reconcile message
38150 Sep 22 23:22:16.545 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38151 Sep 22 23:22:16.545 INFO [1] client ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }
38152 Sep 22 23:22:16.546 INFO [2] received reconcile message
38153 Sep 22 23:22:16.546 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38154 Sep 22 23:22:16.546 INFO [2] client ExtentClose { repair_id: ReconciliationId(125), extent_id: 179 }
38155 Sep 22 23:22:16.546 DEBG 125 Close extent 179
38156 Sep 22 23:22:16.546 DEBG 125 Close extent 179
38157 Sep 22 23:22:16.546 DEBG 125 Close extent 179
38158 Sep 22 23:22:16.547 DEBG [2] It's time to notify for 125
38159 Sep 22 23:22:16.547 INFO Completion from [2] id:125 status:true
38160 Sep 22 23:22:16.547 INFO [126/752] Repair commands completed
38161 Sep 22 23:22:16.547 INFO Pop front: ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38162 Sep 22 23:22:16.547 INFO Sent repair work, now wait for resp
38163 Sep 22 23:22:16.547 INFO [0] received reconcile message
38164 Sep 22 23:22:16.547 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38165 Sep 22 23:22:16.547 INFO [0] client ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38166 Sep 22 23:22:16.547 INFO [0] Sending repair request ReconciliationId(126)
38167 Sep 22 23:22:16.547 INFO [1] received reconcile message
38168 Sep 22 23:22:16.547 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38169 Sep 22 23:22:16.547 INFO [1] client ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38170 Sep 22 23:22:16.547 INFO [1] No action required ReconciliationId(126)
38171 Sep 22 23:22:16.547 INFO [2] received reconcile message
38172 Sep 22 23:22:16.547 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38173 Sep 22 23:22:16.547 INFO [2] client ExtentRepair { repair_id: ReconciliationId(126), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38174 Sep 22 23:22:16.547 INFO [2] No action required ReconciliationId(126)
38175 Sep 22 23:22:16.547 DEBG 126 Repair extent 179 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38176 Sep 22 23:22:16.547 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B3.copy"
38177 Sep 22 23:22:16.586 DEBG IO Read 1077 has deps [JobId(1076)]
38178 Sep 22 23:22:16.611 INFO accepted connection, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38179 Sep 22 23:22:16.611 TRCE incoming request, uri: /extent/179/files, method: GET, req_id: 3d216e56-1ecf-462b-84bf-1456c9fcf6ed, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38180 Sep 22 23:22:16.611 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/179/files, method: GET, req_id: 3d216e56-1ecf-462b-84bf-1456c9fcf6ed, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38181 Sep 22 23:22:16.611 INFO eid:179 Found repair files: ["0B3", "0B3.db"]
38182 Sep 22 23:22:16.612 TRCE incoming request, uri: /newextent/179/data, method: GET, req_id: 40fc9335-ddea-445d-8781-6aa4440b25bd, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38183 Sep 22 23:22:16.612 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/179/data, method: GET, req_id: 40fc9335-ddea-445d-8781-6aa4440b25bd, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38184 Sep 22 23:22:16.617 TRCE incoming request, uri: /newextent/179/db, method: GET, req_id: 8ab3642e-6a6a-448f-97fb-c6540ff59147, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38185 Sep 22 23:22:16.617 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/179/db, method: GET, req_id: 8ab3642e-6a6a-448f-97fb-c6540ff59147, remote_addr: 127.0.0.1:54083, local_addr: 127.0.0.1:52864, task: repair
38186 Sep 22 23:22:16.618 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B3.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B3.replace"
38187 Sep 22 23:22:16.618 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38188 Sep 22 23:22:16.619 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B3.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38189 Sep 22 23:22:16.619 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B3"
38190 Sep 22 23:22:16.619 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B3.db"
38191 Sep 22 23:22:16.619 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38192 Sep 22 23:22:16.619 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B3.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B3.completed"
38193 Sep 22 23:22:16.619 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38194 Sep 22 23:22:16.619 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38195 Sep 22 23:22:16.619 DEBG [0] It's time to notify for 126
38196 Sep 22 23:22:16.620 INFO Completion from [0] id:126 status:true
38197 Sep 22 23:22:16.620 INFO [127/752] Repair commands completed
38198 Sep 22 23:22:16.620 INFO Pop front: ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }, state: ClientData([New, New, New]) }
38199 Sep 22 23:22:16.620 INFO Sent repair work, now wait for resp
38200 Sep 22 23:22:16.620 INFO [0] received reconcile message
38201 Sep 22 23:22:16.620 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }, state: ClientData([InProgress, New, New]) }, : downstairs
38202 Sep 22 23:22:16.620 INFO [0] client ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }
38203 Sep 22 23:22:16.620 INFO [1] received reconcile message
38204 Sep 22 23:22:16.620 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38205 Sep 22 23:22:16.620 INFO [1] client ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }
38206 Sep 22 23:22:16.620 INFO [2] received reconcile message
38207 Sep 22 23:22:16.620 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38208 Sep 22 23:22:16.620 INFO [2] client ExtentReopen { repair_id: ReconciliationId(127), extent_id: 179 }
38209 Sep 22 23:22:16.620 DEBG 127 Reopen extent 179
38210 Sep 22 23:22:16.621 DEBG 127 Reopen extent 179
38211 Sep 22 23:22:16.621 DEBG 127 Reopen extent 179
38212 Sep 22 23:22:16.622 DEBG [2] It's time to notify for 127
38213 Sep 22 23:22:16.622 INFO Completion from [2] id:127 status:true
38214 Sep 22 23:22:16.622 INFO [128/752] Repair commands completed
38215 Sep 22 23:22:16.622 INFO Pop front: ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38216 Sep 22 23:22:16.622 INFO Sent repair work, now wait for resp
38217 Sep 22 23:22:16.622 INFO [0] received reconcile message
38218 Sep 22 23:22:16.622 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38219 Sep 22 23:22:16.622 INFO [0] client ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38220 Sep 22 23:22:16.622 INFO [1] received reconcile message
38221 Sep 22 23:22:16.622 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38222 Sep 22 23:22:16.622 INFO [1] client ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38223 Sep 22 23:22:16.622 INFO [2] received reconcile message
38224 Sep 22 23:22:16.622 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38225 Sep 22 23:22:16.622 INFO [2] client ExtentFlush { repair_id: ReconciliationId(128), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38226 Sep 22 23:22:16.622 DEBG 128 Flush extent 19 with f:2 g:2
38227 Sep 22 23:22:16.622 DEBG Flush just extent 19 with f:2 and g:2
38228 Sep 22 23:22:16.622 DEBG [1] It's time to notify for 128
38229 Sep 22 23:22:16.622 INFO Completion from [1] id:128 status:true
38230 Sep 22 23:22:16.622 INFO [129/752] Repair commands completed
38231 Sep 22 23:22:16.622 INFO Pop front: ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }, state: ClientData([New, New, New]) }
38232 Sep 22 23:22:16.622 INFO Sent repair work, now wait for resp
38233 Sep 22 23:22:16.622 INFO [0] received reconcile message
38234 Sep 22 23:22:16.622 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }, state: ClientData([InProgress, New, New]) }, : downstairs
38235 Sep 22 23:22:16.622 INFO [0] client ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }
38236 Sep 22 23:22:16.623 INFO [1] received reconcile message
38237 Sep 22 23:22:16.623 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38238 Sep 22 23:22:16.623 INFO [1] client ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }
38239 Sep 22 23:22:16.623 INFO [2] received reconcile message
38240 Sep 22 23:22:16.623 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38241 Sep 22 23:22:16.623 INFO [2] client ExtentClose { repair_id: ReconciliationId(129), extent_id: 19 }
38242 Sep 22 23:22:16.623 DEBG 129 Close extent 19
38243 Sep 22 23:22:16.623 DEBG 129 Close extent 19
38244 Sep 22 23:22:16.623 DEBG 129 Close extent 19
38245 Sep 22 23:22:16.624 DEBG [2] It's time to notify for 129
38246 Sep 22 23:22:16.624 INFO Completion from [2] id:129 status:true
38247 Sep 22 23:22:16.624 INFO [130/752] Repair commands completed
38248 Sep 22 23:22:16.624 INFO Pop front: ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38249 Sep 22 23:22:16.624 INFO Sent repair work, now wait for resp
38250 Sep 22 23:22:16.624 INFO [0] received reconcile message
38251 Sep 22 23:22:16.624 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38252 Sep 22 23:22:16.624 INFO [0] client ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38253 Sep 22 23:22:16.624 INFO [0] Sending repair request ReconciliationId(130)
38254 Sep 22 23:22:16.624 INFO [1] received reconcile message
38255 Sep 22 23:22:16.624 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38256 Sep 22 23:22:16.624 INFO [1] client ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38257 Sep 22 23:22:16.624 INFO [1] No action required ReconciliationId(130)
38258 Sep 22 23:22:16.624 INFO [2] received reconcile message
38259 Sep 22 23:22:16.624 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38260 Sep 22 23:22:16.624 INFO [2] client ExtentRepair { repair_id: ReconciliationId(130), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38261 Sep 22 23:22:16.624 INFO [2] No action required ReconciliationId(130)
38262 Sep 22 23:22:16.624 DEBG 130 Repair extent 19 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38263 Sep 22 23:22:16.624 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/013.copy"
38264 Sep 22 23:22:16.689 INFO accepted connection, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38265 Sep 22 23:22:16.689 TRCE incoming request, uri: /extent/19/files, method: GET, req_id: 531fd6a2-1346-48a5-9a9f-176be177d3a0, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38266 Sep 22 23:22:16.689 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/19/files, method: GET, req_id: 531fd6a2-1346-48a5-9a9f-176be177d3a0, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38267 Sep 22 23:22:16.689 INFO eid:19 Found repair files: ["013", "013.db"]
38268 Sep 22 23:22:16.690 TRCE incoming request, uri: /newextent/19/data, method: GET, req_id: b8abf94d-48f5-4db9-bc57-98fbf459827b, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38269 Sep 22 23:22:16.690 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/19/data, method: GET, req_id: b8abf94d-48f5-4db9-bc57-98fbf459827b, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38270 Sep 22 23:22:16.695 TRCE incoming request, uri: /newextent/19/db, method: GET, req_id: 1d625ee8-5ebb-4116-846d-673d1ac07c1d, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38271 Sep 22 23:22:16.695 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/19/db, method: GET, req_id: 1d625ee8-5ebb-4116-846d-673d1ac07c1d, remote_addr: 127.0.0.1:33589, local_addr: 127.0.0.1:52864, task: repair
38272 Sep 22 23:22:16.696 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/013.copy" to "/tmp/downstairs-zrMnlo6G/00/000/013.replace"
38273 Sep 22 23:22:16.696 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38274 Sep 22 23:22:16.697 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/013.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38275 Sep 22 23:22:16.697 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/013"
38276 Sep 22 23:22:16.697 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/013.db"
38277 Sep 22 23:22:16.697 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38278 Sep 22 23:22:16.697 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/013.replace" to "/tmp/downstairs-zrMnlo6G/00/000/013.completed"
38279 Sep 22 23:22:16.697 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38280 Sep 22 23:22:16.697 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38281 Sep 22 23:22:16.698 DEBG [0] It's time to notify for 130
38282 Sep 22 23:22:16.698 INFO Completion from [0] id:130 status:true
38283 Sep 22 23:22:16.698 INFO [131/752] Repair commands completed
38284 Sep 22 23:22:16.698 INFO Pop front: ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }, state: ClientData([New, New, New]) }
38285 Sep 22 23:22:16.698 INFO Sent repair work, now wait for resp
38286 Sep 22 23:22:16.698 INFO [0] received reconcile message
38287 Sep 22 23:22:16.698 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }, state: ClientData([InProgress, New, New]) }, : downstairs
38288 Sep 22 23:22:16.698 INFO [0] client ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }
38289 Sep 22 23:22:16.698 INFO [1] received reconcile message
38290 Sep 22 23:22:16.698 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38291 Sep 22 23:22:16.698 INFO [1] client ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }
38292 Sep 22 23:22:16.698 INFO [2] received reconcile message
38293 Sep 22 23:22:16.698 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38294 Sep 22 23:22:16.698 INFO [2] client ExtentReopen { repair_id: ReconciliationId(131), extent_id: 19 }
38295 Sep 22 23:22:16.698 DEBG 131 Reopen extent 19
38296 Sep 22 23:22:16.699 DEBG 131 Reopen extent 19
38297 Sep 22 23:22:16.699 DEBG 131 Reopen extent 19
38298 Sep 22 23:22:16.700 DEBG [2] It's time to notify for 131
38299 Sep 22 23:22:16.700 INFO Completion from [2] id:131 status:true
38300 Sep 22 23:22:16.700 INFO [132/752] Repair commands completed
38301 Sep 22 23:22:16.700 INFO Pop front: ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38302 Sep 22 23:22:16.700 INFO Sent repair work, now wait for resp
38303 Sep 22 23:22:16.700 INFO [0] received reconcile message
38304 Sep 22 23:22:16.700 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38305 Sep 22 23:22:16.700 INFO [0] client ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38306 Sep 22 23:22:16.700 INFO [1] received reconcile message
38307 Sep 22 23:22:16.700 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38308 Sep 22 23:22:16.700 INFO [1] client ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38309 Sep 22 23:22:16.700 INFO [2] received reconcile message
38310 Sep 22 23:22:16.700 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38311 Sep 22 23:22:16.700 INFO [2] client ExtentFlush { repair_id: ReconciliationId(132), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38312 Sep 22 23:22:16.700 DEBG 132 Flush extent 167 with f:2 g:2
38313 Sep 22 23:22:16.700 DEBG Flush just extent 167 with f:2 and g:2
38314 Sep 22 23:22:16.701 DEBG [1] It's time to notify for 132
38315 Sep 22 23:22:16.701 INFO Completion from [1] id:132 status:true
38316 Sep 22 23:22:16.701 INFO [133/752] Repair commands completed
38317 Sep 22 23:22:16.701 INFO Pop front: ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }, state: ClientData([New, New, New]) }
38318 Sep 22 23:22:16.701 INFO Sent repair work, now wait for resp
38319 Sep 22 23:22:16.701 INFO [0] received reconcile message
38320 Sep 22 23:22:16.701 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }, state: ClientData([InProgress, New, New]) }, : downstairs
38321 Sep 22 23:22:16.701 INFO [0] client ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }
38322 Sep 22 23:22:16.701 INFO [1] received reconcile message
38323 Sep 22 23:22:16.701 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38324 Sep 22 23:22:16.701 INFO [1] client ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }
38325 Sep 22 23:22:16.701 INFO [2] received reconcile message
38326 Sep 22 23:22:16.701 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38327 Sep 22 23:22:16.701 INFO [2] client ExtentClose { repair_id: ReconciliationId(133), extent_id: 167 }
38328 Sep 22 23:22:16.701 DEBG 133 Close extent 167
38329 Sep 22 23:22:16.701 DEBG 133 Close extent 167
38330 Sep 22 23:22:16.702 DEBG 133 Close extent 167
38331 Sep 22 23:22:16.702 DEBG [2] It's time to notify for 133
38332 Sep 22 23:22:16.702 INFO Completion from [2] id:133 status:true
38333 Sep 22 23:22:16.702 INFO [134/752] Repair commands completed
38334 Sep 22 23:22:16.702 INFO Pop front: ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38335 Sep 22 23:22:16.702 INFO Sent repair work, now wait for resp
38336 Sep 22 23:22:16.702 INFO [0] received reconcile message
38337 Sep 22 23:22:16.702 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38338 Sep 22 23:22:16.702 INFO [0] client ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38339 Sep 22 23:22:16.702 INFO [0] Sending repair request ReconciliationId(134)
38340 Sep 22 23:22:16.702 INFO [1] received reconcile message
38341 Sep 22 23:22:16.702 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38342 Sep 22 23:22:16.702 INFO [1] client ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38343 Sep 22 23:22:16.702 INFO [1] No action required ReconciliationId(134)
38344 Sep 22 23:22:16.702 INFO [2] received reconcile message
38345 Sep 22 23:22:16.702 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38346 Sep 22 23:22:16.702 INFO [2] client ExtentRepair { repair_id: ReconciliationId(134), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38347 Sep 22 23:22:16.702 INFO [2] No action required ReconciliationId(134)
38348 Sep 22 23:22:16.703 DEBG 134 Repair extent 167 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38349 Sep 22 23:22:16.703 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A7.copy"
38350 Sep 22 23:22:16.766 INFO accepted connection, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38351 Sep 22 23:22:16.766 TRCE incoming request, uri: /extent/167/files, method: GET, req_id: a928b88a-8d6d-4263-93d2-e7b6b06142c3, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38352 Sep 22 23:22:16.766 INFO request completed, latency_us: 220, response_code: 200, uri: /extent/167/files, method: GET, req_id: a928b88a-8d6d-4263-93d2-e7b6b06142c3, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38353 Sep 22 23:22:16.766 INFO eid:167 Found repair files: ["0A7", "0A7.db"]
38354 Sep 22 23:22:16.767 TRCE incoming request, uri: /newextent/167/data, method: GET, req_id: 8be497f6-c334-4224-ad8c-ce3e63dc02a0, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38355 Sep 22 23:22:16.767 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/167/data, method: GET, req_id: 8be497f6-c334-4224-ad8c-ce3e63dc02a0, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38356 Sep 22 23:22:16.772 TRCE incoming request, uri: /newextent/167/db, method: GET, req_id: 1619eb67-fadf-4829-9f8a-904e0bdd533e, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38357 Sep 22 23:22:16.772 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/167/db, method: GET, req_id: 1619eb67-fadf-4829-9f8a-904e0bdd533e, remote_addr: 127.0.0.1:58639, local_addr: 127.0.0.1:52864, task: repair
38358 Sep 22 23:22:16.773 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A7.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A7.replace"
38359 Sep 22 23:22:16.773 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38360 Sep 22 23:22:16.774 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A7.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38361 Sep 22 23:22:16.774 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A7"
38362 Sep 22 23:22:16.774 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A7.db"
38363 Sep 22 23:22:16.774 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38364 Sep 22 23:22:16.774 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A7.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A7.completed"
38365 Sep 22 23:22:16.774 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38366 Sep 22 23:22:16.774 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38367 Sep 22 23:22:16.774 DEBG [0] It's time to notify for 134
38368 Sep 22 23:22:16.775 INFO Completion from [0] id:134 status:true
38369 Sep 22 23:22:16.775 INFO [135/752] Repair commands completed
38370 Sep 22 23:22:16.775 INFO Pop front: ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }, state: ClientData([New, New, New]) }
38371 Sep 22 23:22:16.775 INFO Sent repair work, now wait for resp
38372 Sep 22 23:22:16.775 INFO [0] received reconcile message
38373 Sep 22 23:22:16.775 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }, state: ClientData([InProgress, New, New]) }, : downstairs
38374 Sep 22 23:22:16.775 INFO [0] client ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }
38375 Sep 22 23:22:16.775 INFO [1] received reconcile message
38376 Sep 22 23:22:16.775 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38377 Sep 22 23:22:16.775 INFO [1] client ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }
38378 Sep 22 23:22:16.775 INFO [2] received reconcile message
38379 Sep 22 23:22:16.775 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38380 Sep 22 23:22:16.775 INFO [2] client ExtentReopen { repair_id: ReconciliationId(135), extent_id: 167 }
38381 Sep 22 23:22:16.775 DEBG 135 Reopen extent 167
38382 Sep 22 23:22:16.776 DEBG 135 Reopen extent 167
38383 Sep 22 23:22:16.776 DEBG 135 Reopen extent 167
38384 Sep 22 23:22:16.777 DEBG [2] It's time to notify for 135
38385 Sep 22 23:22:16.777 INFO Completion from [2] id:135 status:true
38386 Sep 22 23:22:16.777 INFO [136/752] Repair commands completed
38387 Sep 22 23:22:16.777 INFO Pop front: ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38388 Sep 22 23:22:16.777 INFO Sent repair work, now wait for resp
38389 Sep 22 23:22:16.777 INFO [0] received reconcile message
38390 Sep 22 23:22:16.777 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38391 Sep 22 23:22:16.777 INFO [0] client ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38392 Sep 22 23:22:16.777 INFO [1] received reconcile message
38393 Sep 22 23:22:16.777 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38394 Sep 22 23:22:16.777 INFO [1] client ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38395 Sep 22 23:22:16.777 INFO [2] received reconcile message
38396 Sep 22 23:22:16.777 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38397 Sep 22 23:22:16.777 INFO [2] client ExtentFlush { repair_id: ReconciliationId(136), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38398 Sep 22 23:22:16.777 DEBG 136 Flush extent 182 with f:2 g:2
38399 Sep 22 23:22:16.777 DEBG Flush just extent 182 with f:2 and g:2
38400 Sep 22 23:22:16.777 DEBG [1] It's time to notify for 136
38401 Sep 22 23:22:16.777 INFO Completion from [1] id:136 status:true
38402 Sep 22 23:22:16.777 INFO [137/752] Repair commands completed
38403 Sep 22 23:22:16.777 INFO Pop front: ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }, state: ClientData([New, New, New]) }
38404 Sep 22 23:22:16.777 INFO Sent repair work, now wait for resp
38405 Sep 22 23:22:16.777 INFO [0] received reconcile message
38406 Sep 22 23:22:16.777 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }, state: ClientData([InProgress, New, New]) }, : downstairs
38407 Sep 22 23:22:16.777 INFO [0] client ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }
38408 Sep 22 23:22:16.778 INFO [1] received reconcile message
38409 Sep 22 23:22:16.778 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38410 Sep 22 23:22:16.778 INFO [1] client ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }
38411 Sep 22 23:22:16.778 INFO [2] received reconcile message
38412 Sep 22 23:22:16.778 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38413 Sep 22 23:22:16.778 INFO [2] client ExtentClose { repair_id: ReconciliationId(137), extent_id: 182 }
38414 Sep 22 23:22:16.778 DEBG 137 Close extent 182
38415 Sep 22 23:22:16.778 DEBG 137 Close extent 182
38416 Sep 22 23:22:16.778 DEBG 137 Close extent 182
38417 Sep 22 23:22:16.779 DEBG [2] It's time to notify for 137
38418 Sep 22 23:22:16.779 INFO Completion from [2] id:137 status:true
38419 Sep 22 23:22:16.779 INFO [138/752] Repair commands completed
38420 Sep 22 23:22:16.779 INFO Pop front: ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38421 Sep 22 23:22:16.779 INFO Sent repair work, now wait for resp
38422 Sep 22 23:22:16.779 INFO [0] received reconcile message
38423 Sep 22 23:22:16.779 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38424 Sep 22 23:22:16.779 INFO [0] client ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38425 Sep 22 23:22:16.779 INFO [0] Sending repair request ReconciliationId(138)
38426 Sep 22 23:22:16.779 INFO [1] received reconcile message
38427 Sep 22 23:22:16.779 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38428 Sep 22 23:22:16.779 INFO [1] client ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38429 Sep 22 23:22:16.779 INFO [1] No action required ReconciliationId(138)
38430 Sep 22 23:22:16.779 INFO [2] received reconcile message
38431 Sep 22 23:22:16.779 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38432 Sep 22 23:22:16.779 INFO [2] client ExtentRepair { repair_id: ReconciliationId(138), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38433 Sep 22 23:22:16.779 INFO [2] No action required ReconciliationId(138)
38434 Sep 22 23:22:16.779 DEBG 138 Repair extent 182 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38435 Sep 22 23:22:16.779 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B6.copy"
38436 Sep 22 23:22:16.843 INFO accepted connection, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38437 Sep 22 23:22:16.843 TRCE incoming request, uri: /extent/182/files, method: GET, req_id: 61175ef0-46f1-4596-8edc-fd061f1ed308, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38438 Sep 22 23:22:16.844 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/182/files, method: GET, req_id: 61175ef0-46f1-4596-8edc-fd061f1ed308, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38439 Sep 22 23:22:16.844 INFO eid:182 Found repair files: ["0B6", "0B6.db"]
38440 Sep 22 23:22:16.844 TRCE incoming request, uri: /newextent/182/data, method: GET, req_id: aa3cf251-40e5-4122-a2e5-d277bc19b9ec, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38441 Sep 22 23:22:16.844 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/182/data, method: GET, req_id: aa3cf251-40e5-4122-a2e5-d277bc19b9ec, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38442 Sep 22 23:22:16.849 TRCE incoming request, uri: /newextent/182/db, method: GET, req_id: a8840280-cf29-48c8-809c-40df6e0275eb, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38443 Sep 22 23:22:16.850 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/182/db, method: GET, req_id: a8840280-cf29-48c8-809c-40df6e0275eb, remote_addr: 127.0.0.1:41659, local_addr: 127.0.0.1:52864, task: repair
38444 Sep 22 23:22:16.851 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B6.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B6.replace"
38445 Sep 22 23:22:16.851 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38446 Sep 22 23:22:16.852 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B6.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38447 Sep 22 23:22:16.852 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B6"
38448 Sep 22 23:22:16.852 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B6.db"
38449 Sep 22 23:22:16.852 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38450 Sep 22 23:22:16.852 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B6.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B6.completed"
38451 Sep 22 23:22:16.852 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38452 Sep 22 23:22:16.852 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38453 Sep 22 23:22:16.852 DEBG [0] It's time to notify for 138
38454 Sep 22 23:22:16.852 INFO Completion from [0] id:138 status:true
38455 Sep 22 23:22:16.852 INFO [139/752] Repair commands completed
38456 Sep 22 23:22:16.852 INFO Pop front: ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }, state: ClientData([New, New, New]) }
38457 Sep 22 23:22:16.852 INFO Sent repair work, now wait for resp
38458 Sep 22 23:22:16.852 INFO [0] received reconcile message
38459 Sep 22 23:22:16.853 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }, state: ClientData([InProgress, New, New]) }, : downstairs
38460 Sep 22 23:22:16.853 INFO [0] client ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }
38461 Sep 22 23:22:16.853 INFO [1] received reconcile message
38462 Sep 22 23:22:16.853 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38463 Sep 22 23:22:16.853 INFO [1] client ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }
38464 Sep 22 23:22:16.853 INFO [2] received reconcile message
38465 Sep 22 23:22:16.853 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38466 Sep 22 23:22:16.853 INFO [2] client ExtentReopen { repair_id: ReconciliationId(139), extent_id: 182 }
38467 Sep 22 23:22:16.853 DEBG 139 Reopen extent 182
38468 Sep 22 23:22:16.853 DEBG 139 Reopen extent 182
38469 Sep 22 23:22:16.854 DEBG 139 Reopen extent 182
38470 Sep 22 23:22:16.854 DEBG [2] It's time to notify for 139
38471 Sep 22 23:22:16.855 INFO Completion from [2] id:139 status:true
38472 Sep 22 23:22:16.855 INFO [140/752] Repair commands completed
38473 Sep 22 23:22:16.855 INFO Pop front: ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38474 Sep 22 23:22:16.855 INFO Sent repair work, now wait for resp
38475 Sep 22 23:22:16.855 INFO [0] received reconcile message
38476 Sep 22 23:22:16.855 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38477 Sep 22 23:22:16.855 INFO [0] client ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38478 Sep 22 23:22:16.855 INFO [1] received reconcile message
38479 Sep 22 23:22:16.855 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38480 Sep 22 23:22:16.855 INFO [1] client ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38481 Sep 22 23:22:16.855 INFO [2] received reconcile message
38482 Sep 22 23:22:16.855 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38483 Sep 22 23:22:16.855 INFO [2] client ExtentFlush { repair_id: ReconciliationId(140), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38484 Sep 22 23:22:16.855 DEBG 140 Flush extent 78 with f:2 g:2
38485 Sep 22 23:22:16.855 DEBG Flush just extent 78 with f:2 and g:2
38486 Sep 22 23:22:16.855 DEBG [1] It's time to notify for 140
38487 Sep 22 23:22:16.855 INFO Completion from [1] id:140 status:true
38488 Sep 22 23:22:16.855 INFO [141/752] Repair commands completed
38489 Sep 22 23:22:16.855 INFO Pop front: ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }, state: ClientData([New, New, New]) }
38490 Sep 22 23:22:16.855 INFO Sent repair work, now wait for resp
38491 Sep 22 23:22:16.855 INFO [0] received reconcile message
38492 Sep 22 23:22:16.855 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }, state: ClientData([InProgress, New, New]) }, : downstairs
38493 Sep 22 23:22:16.855 INFO [0] client ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }
38494 Sep 22 23:22:16.855 INFO [1] received reconcile message
38495 Sep 22 23:22:16.855 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38496 Sep 22 23:22:16.855 INFO [1] client ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }
38497 Sep 22 23:22:16.855 INFO [2] received reconcile message
38498 Sep 22 23:22:16.855 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38499 Sep 22 23:22:16.855 INFO [2] client ExtentClose { repair_id: ReconciliationId(141), extent_id: 78 }
38500 Sep 22 23:22:16.856 DEBG 141 Close extent 78
38501 Sep 22 23:22:16.856 DEBG 141 Close extent 78
38502 Sep 22 23:22:16.856 DEBG 141 Close extent 78
38503 Sep 22 23:22:16.856 DEBG [2] It's time to notify for 141
38504 Sep 22 23:22:16.857 INFO Completion from [2] id:141 status:true
38505 Sep 22 23:22:16.857 INFO [142/752] Repair commands completed
38506 Sep 22 23:22:16.857 INFO Pop front: ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38507 Sep 22 23:22:16.857 INFO Sent repair work, now wait for resp
38508 Sep 22 23:22:16.857 INFO [0] received reconcile message
38509 Sep 22 23:22:16.857 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38510 Sep 22 23:22:16.857 INFO [0] client ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38511 Sep 22 23:22:16.857 INFO [0] Sending repair request ReconciliationId(142)
38512 Sep 22 23:22:16.857 INFO [1] received reconcile message
38513 Sep 22 23:22:16.857 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38514 Sep 22 23:22:16.857 INFO [1] client ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38515 Sep 22 23:22:16.857 INFO [1] No action required ReconciliationId(142)
38516 Sep 22 23:22:16.857 INFO [2] received reconcile message
38517 Sep 22 23:22:16.857 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38518 Sep 22 23:22:16.857 INFO [2] client ExtentRepair { repair_id: ReconciliationId(142), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38519 Sep 22 23:22:16.857 INFO [2] No action required ReconciliationId(142)
38520 Sep 22 23:22:16.857 DEBG 142 Repair extent 78 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38521 Sep 22 23:22:16.857 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/04E.copy"
38522 Sep 22 23:22:16.920 INFO accepted connection, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38523 Sep 22 23:22:16.920 TRCE incoming request, uri: /extent/78/files, method: GET, req_id: 7fb138ee-9655-472d-9c2b-993ae6a1816b, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38524 Sep 22 23:22:16.920 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/78/files, method: GET, req_id: 7fb138ee-9655-472d-9c2b-993ae6a1816b, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38525 Sep 22 23:22:16.920 INFO eid:78 Found repair files: ["04E", "04E.db"]
38526 Sep 22 23:22:16.921 TRCE incoming request, uri: /newextent/78/data, method: GET, req_id: 5b404dd1-9460-4baf-a26b-15fe5306f7ab, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38527 Sep 22 23:22:16.921 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/78/data, method: GET, req_id: 5b404dd1-9460-4baf-a26b-15fe5306f7ab, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38528 Sep 22 23:22:16.922 DEBG [rc] retire 1074 clears [JobId(1073), JobId(1074)], : downstairs
38529 Sep 22 23:22:16.926 TRCE incoming request, uri: /newextent/78/db, method: GET, req_id: 87bb67ce-8c37-42b2-b323-377daad1fb95, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38530 Sep 22 23:22:16.926 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/78/db, method: GET, req_id: 87bb67ce-8c37-42b2-b323-377daad1fb95, remote_addr: 127.0.0.1:34914, local_addr: 127.0.0.1:52864, task: repair
38531 Sep 22 23:22:16.927 DEBG up_ds_listen was notified
38532 Sep 22 23:22:16.927 DEBG up_ds_listen process 1076
38533 Sep 22 23:22:16.927 DEBG [A] ack job 1076:77, : downstairs
38534 Sep 22 23:22:16.927 DEBG up_ds_listen checked 1 jobs, back to waiting
38535 Sep 22 23:22:16.927 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/04E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/04E.replace"
38536 Sep 22 23:22:16.927 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38537 Sep 22 23:22:16.928 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/04E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38538 Sep 22 23:22:16.929 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04E"
38539 Sep 22 23:22:16.929 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04E.db"
38540 Sep 22 23:22:16.929 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38541 Sep 22 23:22:16.929 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/04E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/04E.completed"
38542 Sep 22 23:22:16.929 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38543 Sep 22 23:22:16.929 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38544 Sep 22 23:22:16.929 DEBG [0] It's time to notify for 142
38545 Sep 22 23:22:16.929 INFO Completion from [0] id:142 status:true
38546 Sep 22 23:22:16.929 INFO [143/752] Repair commands completed
38547 Sep 22 23:22:16.929 INFO Pop front: ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }, state: ClientData([New, New, New]) }
38548 Sep 22 23:22:16.929 INFO Sent repair work, now wait for resp
38549 Sep 22 23:22:16.929 INFO [0] received reconcile message
38550 Sep 22 23:22:16.929 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }, state: ClientData([InProgress, New, New]) }, : downstairs
38551 Sep 22 23:22:16.929 INFO [0] client ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }
38552 Sep 22 23:22:16.929 INFO [1] received reconcile message
38553 Sep 22 23:22:16.929 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38554 Sep 22 23:22:16.929 INFO [1] client ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }
38555 Sep 22 23:22:16.929 INFO [2] received reconcile message
38556 Sep 22 23:22:16.929 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38557 Sep 22 23:22:16.929 INFO [2] client ExtentReopen { repair_id: ReconciliationId(143), extent_id: 78 }
38558 Sep 22 23:22:16.930 DEBG 143 Reopen extent 78
38559 Sep 22 23:22:16.930 DEBG 143 Reopen extent 78
38560 Sep 22 23:22:16.931 DEBG 143 Reopen extent 78
38561 Sep 22 23:22:16.931 DEBG [2] It's time to notify for 143
38562 Sep 22 23:22:16.931 INFO Completion from [2] id:143 status:true
38563 Sep 22 23:22:16.931 INFO [144/752] Repair commands completed
38564 Sep 22 23:22:16.931 INFO Pop front: ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38565 Sep 22 23:22:16.931 INFO Sent repair work, now wait for resp
38566 Sep 22 23:22:16.931 INFO [0] received reconcile message
38567 Sep 22 23:22:16.932 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38568 Sep 22 23:22:16.932 INFO [0] client ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38569 Sep 22 23:22:16.932 INFO [1] received reconcile message
38570 Sep 22 23:22:16.932 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38571 Sep 22 23:22:16.932 INFO [1] client ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38572 Sep 22 23:22:16.932 INFO [2] received reconcile message
38573 Sep 22 23:22:16.932 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38574 Sep 22 23:22:16.932 INFO [2] client ExtentFlush { repair_id: ReconciliationId(144), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38575 Sep 22 23:22:16.932 DEBG 144 Flush extent 52 with f:2 g:2
38576 Sep 22 23:22:16.932 DEBG Flush just extent 52 with f:2 and g:2
38577 Sep 22 23:22:16.932 DEBG [1] It's time to notify for 144
38578 Sep 22 23:22:16.932 INFO Completion from [1] id:144 status:true
38579 Sep 22 23:22:16.932 INFO [145/752] Repair commands completed
38580 Sep 22 23:22:16.932 INFO Pop front: ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }, state: ClientData([New, New, New]) }
38581 Sep 22 23:22:16.932 INFO Sent repair work, now wait for resp
38582 Sep 22 23:22:16.932 INFO [0] received reconcile message
38583 Sep 22 23:22:16.932 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }, state: ClientData([InProgress, New, New]) }, : downstairs
38584 Sep 22 23:22:16.932 INFO [0] client ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }
38585 Sep 22 23:22:16.932 INFO [1] received reconcile message
38586 Sep 22 23:22:16.932 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38587 Sep 22 23:22:16.932 INFO [1] client ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }
38588 Sep 22 23:22:16.932 INFO [2] received reconcile message
38589 Sep 22 23:22:16.932 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38590 Sep 22 23:22:16.932 INFO [2] client ExtentClose { repair_id: ReconciliationId(145), extent_id: 52 }
38591 Sep 22 23:22:16.932 DEBG 145 Close extent 52
38592 Sep 22 23:22:16.933 DEBG 145 Close extent 52
38593 Sep 22 23:22:16.933 DEBG 145 Close extent 52
38594 Sep 22 23:22:16.933 DEBG [2] It's time to notify for 145
38595 Sep 22 23:22:16.933 INFO Completion from [2] id:145 status:true
38596 Sep 22 23:22:16.933 INFO [146/752] Repair commands completed
38597 Sep 22 23:22:16.933 INFO Pop front: ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38598 Sep 22 23:22:16.933 INFO Sent repair work, now wait for resp
38599 Sep 22 23:22:16.934 INFO [0] received reconcile message
38600 Sep 22 23:22:16.934 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38601 Sep 22 23:22:16.934 INFO [0] client ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38602 Sep 22 23:22:16.934 INFO [0] Sending repair request ReconciliationId(146)
38603 Sep 22 23:22:16.934 INFO [1] received reconcile message
38604 Sep 22 23:22:16.934 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38605 Sep 22 23:22:16.934 INFO [1] client ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38606 Sep 22 23:22:16.934 INFO [1] No action required ReconciliationId(146)
38607 Sep 22 23:22:16.934 INFO [2] received reconcile message
38608 Sep 22 23:22:16.934 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38609 Sep 22 23:22:16.934 INFO [2] client ExtentRepair { repair_id: ReconciliationId(146), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38610 Sep 22 23:22:16.934 INFO [2] No action required ReconciliationId(146)
38611 Sep 22 23:22:16.934 DEBG 146 Repair extent 52 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38612 Sep 22 23:22:16.934 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/034.copy"
38613 Sep 22 23:22:16.934 DEBG IO Flush 1078 has deps [JobId(1077), JobId(1076)]
38614 Sep 22 23:22:16.937 INFO [lossy] skipping 1076
38615 Sep 22 23:22:16.937 DEBG Flush :1076 extent_limit None deps:[JobId(1075), JobId(1074)] res:true f:27 g:1
38616 Sep 22 23:22:16.943 DEBG Read :1077 deps:[JobId(1076)] res:true
38617 Sep 22 23:22:16.998 INFO accepted connection, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38618 Sep 22 23:22:16.998 TRCE incoming request, uri: /extent/52/files, method: GET, req_id: 363ed5f9-5739-4d19-ac30-c7a188813cab, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38619 Sep 22 23:22:16.999 INFO request completed, latency_us: 255, response_code: 200, uri: /extent/52/files, method: GET, req_id: 363ed5f9-5739-4d19-ac30-c7a188813cab, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38620 Sep 22 23:22:16.999 INFO eid:52 Found repair files: ["034", "034.db"]
38621 Sep 22 23:22:16.999 TRCE incoming request, uri: /newextent/52/data, method: GET, req_id: 3c3825dd-412b-45c6-a583-45439085c71e, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38622 Sep 22 23:22:17.000 INFO request completed, latency_us: 361, response_code: 200, uri: /newextent/52/data, method: GET, req_id: 3c3825dd-412b-45c6-a583-45439085c71e, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38623 Sep 22 23:22:17.005 TRCE incoming request, uri: /newextent/52/db, method: GET, req_id: a87bbae6-48c7-4d09-a0e2-a8f96e5f4bb5, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38624 Sep 22 23:22:17.005 INFO request completed, latency_us: 317, response_code: 200, uri: /newextent/52/db, method: GET, req_id: a87bbae6-48c7-4d09-a0e2-a8f96e5f4bb5, remote_addr: 127.0.0.1:52058, local_addr: 127.0.0.1:52864, task: repair
38625 Sep 22 23:22:17.007 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/034.copy" to "/tmp/downstairs-zrMnlo6G/00/000/034.replace"
38626 Sep 22 23:22:17.007 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38627 Sep 22 23:22:17.008 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/034.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38628 Sep 22 23:22:17.008 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/034"
38629 Sep 22 23:22:17.008 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/034.db"
38630 Sep 22 23:22:17.008 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38631 Sep 22 23:22:17.008 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/034.replace" to "/tmp/downstairs-zrMnlo6G/00/000/034.completed"
38632 Sep 22 23:22:17.008 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38633 Sep 22 23:22:17.008 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38634 Sep 22 23:22:17.009 DEBG [0] It's time to notify for 146
38635 Sep 22 23:22:17.009 INFO Completion from [0] id:146 status:true
38636 Sep 22 23:22:17.009 INFO [147/752] Repair commands completed
38637 Sep 22 23:22:17.009 INFO Pop front: ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }, state: ClientData([New, New, New]) }
38638 Sep 22 23:22:17.009 INFO Sent repair work, now wait for resp
38639 Sep 22 23:22:17.009 INFO [0] received reconcile message
38640 Sep 22 23:22:17.009 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }, state: ClientData([InProgress, New, New]) }, : downstairs
38641 Sep 22 23:22:17.009 INFO [0] client ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }
38642 Sep 22 23:22:17.009 INFO [1] received reconcile message
38643 Sep 22 23:22:17.009 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38644 Sep 22 23:22:17.009 INFO [1] client ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }
38645 Sep 22 23:22:17.009 INFO [2] received reconcile message
38646 Sep 22 23:22:17.009 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38647 Sep 22 23:22:17.009 INFO [2] client ExtentReopen { repair_id: ReconciliationId(147), extent_id: 52 }
38648 Sep 22 23:22:17.009 DEBG 147 Reopen extent 52
38649 Sep 22 23:22:17.010 DEBG 147 Reopen extent 52
38650 Sep 22 23:22:17.011 DEBG 147 Reopen extent 52
38651 Sep 22 23:22:17.011 DEBG [2] It's time to notify for 147
38652 Sep 22 23:22:17.011 INFO Completion from [2] id:147 status:true
38653 Sep 22 23:22:17.011 INFO [148/752] Repair commands completed
38654 Sep 22 23:22:17.011 INFO Pop front: ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38655 Sep 22 23:22:17.011 INFO Sent repair work, now wait for resp
38656 Sep 22 23:22:17.011 INFO [0] received reconcile message
38657 Sep 22 23:22:17.011 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38658 Sep 22 23:22:17.011 INFO [0] client ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38659 Sep 22 23:22:17.011 INFO [1] received reconcile message
38660 Sep 22 23:22:17.011 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38661 Sep 22 23:22:17.011 INFO [1] client ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38662 Sep 22 23:22:17.012 INFO [2] received reconcile message
38663 Sep 22 23:22:17.012 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38664 Sep 22 23:22:17.012 INFO [2] client ExtentFlush { repair_id: ReconciliationId(148), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38665 Sep 22 23:22:17.012 DEBG 148 Flush extent 172 with f:2 g:2
38666 Sep 22 23:22:17.012 DEBG Flush just extent 172 with f:2 and g:2
38667 Sep 22 23:22:17.012 DEBG [1] It's time to notify for 148
38668 Sep 22 23:22:17.012 INFO Completion from [1] id:148 status:true
38669 Sep 22 23:22:17.012 INFO [149/752] Repair commands completed
38670 Sep 22 23:22:17.012 INFO Pop front: ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }, state: ClientData([New, New, New]) }
38671 Sep 22 23:22:17.012 INFO Sent repair work, now wait for resp
38672 Sep 22 23:22:17.012 INFO [0] received reconcile message
38673 Sep 22 23:22:17.012 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }, state: ClientData([InProgress, New, New]) }, : downstairs
38674 Sep 22 23:22:17.012 INFO [0] client ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }
38675 Sep 22 23:22:17.012 INFO [1] received reconcile message
38676 Sep 22 23:22:17.012 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38677 Sep 22 23:22:17.012 INFO [1] client ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }
38678 Sep 22 23:22:17.012 INFO [2] received reconcile message
38679 Sep 22 23:22:17.012 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38680 Sep 22 23:22:17.012 INFO [2] client ExtentClose { repair_id: ReconciliationId(149), extent_id: 172 }
38681 Sep 22 23:22:17.012 DEBG 149 Close extent 172
38682 Sep 22 23:22:17.013 DEBG 149 Close extent 172
38683 Sep 22 23:22:17.013 DEBG 149 Close extent 172
38684 Sep 22 23:22:17.013 DEBG [2] It's time to notify for 149
38685 Sep 22 23:22:17.013 INFO Completion from [2] id:149 status:true
38686 Sep 22 23:22:17.013 INFO [150/752] Repair commands completed
38687 Sep 22 23:22:17.013 INFO Pop front: ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38688 Sep 22 23:22:17.013 INFO Sent repair work, now wait for resp
38689 Sep 22 23:22:17.013 INFO [0] received reconcile message
38690 Sep 22 23:22:17.013 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38691 Sep 22 23:22:17.013 INFO [0] client ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38692 Sep 22 23:22:17.013 INFO [0] Sending repair request ReconciliationId(150)
38693 Sep 22 23:22:17.014 INFO [1] received reconcile message
38694 Sep 22 23:22:17.014 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38695 Sep 22 23:22:17.014 INFO [1] client ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38696 Sep 22 23:22:17.014 INFO [1] No action required ReconciliationId(150)
38697 Sep 22 23:22:17.014 INFO [2] received reconcile message
38698 Sep 22 23:22:17.014 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38699 Sep 22 23:22:17.014 INFO [2] client ExtentRepair { repair_id: ReconciliationId(150), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38700 Sep 22 23:22:17.014 INFO [2] No action required ReconciliationId(150)
38701 Sep 22 23:22:17.014 DEBG 150 Repair extent 172 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38702 Sep 22 23:22:17.014 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0AC.copy"
38703 Sep 22 23:22:17.078 INFO accepted connection, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38704 Sep 22 23:22:17.078 TRCE incoming request, uri: /extent/172/files, method: GET, req_id: a38ba9a9-76dc-4fb1-bf59-413d19ce07d3, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38705 Sep 22 23:22:17.079 INFO request completed, latency_us: 210, response_code: 200, uri: /extent/172/files, method: GET, req_id: a38ba9a9-76dc-4fb1-bf59-413d19ce07d3, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38706 Sep 22 23:22:17.079 INFO eid:172 Found repair files: ["0AC", "0AC.db"]
38707 Sep 22 23:22:17.079 TRCE incoming request, uri: /newextent/172/data, method: GET, req_id: 2e306838-2ae4-408d-98e5-75df94fc25ed, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38708 Sep 22 23:22:17.080 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/172/data, method: GET, req_id: 2e306838-2ae4-408d-98e5-75df94fc25ed, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38709 Sep 22 23:22:17.084 TRCE incoming request, uri: /newextent/172/db, method: GET, req_id: a6273c2d-8391-4583-b3a6-99403a1c8d7a, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38710 Sep 22 23:22:17.085 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/172/db, method: GET, req_id: a6273c2d-8391-4583-b3a6-99403a1c8d7a, remote_addr: 127.0.0.1:62113, local_addr: 127.0.0.1:52864, task: repair
38711 Sep 22 23:22:17.086 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0AC.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0AC.replace"
38712 Sep 22 23:22:17.086 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38713 Sep 22 23:22:17.086 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0AC.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38714 Sep 22 23:22:17.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AC"
38715 Sep 22 23:22:17.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AC.db"
38716 Sep 22 23:22:17.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38717 Sep 22 23:22:17.087 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0AC.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0AC.completed"
38718 Sep 22 23:22:17.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38719 Sep 22 23:22:17.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38720 Sep 22 23:22:17.087 DEBG [0] It's time to notify for 150
38721 Sep 22 23:22:17.087 INFO Completion from [0] id:150 status:true
38722 Sep 22 23:22:17.087 INFO [151/752] Repair commands completed
38723 Sep 22 23:22:17.087 INFO Pop front: ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }, state: ClientData([New, New, New]) }
38724 Sep 22 23:22:17.087 INFO Sent repair work, now wait for resp
38725 Sep 22 23:22:17.087 INFO [0] received reconcile message
38726 Sep 22 23:22:17.087 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }, state: ClientData([InProgress, New, New]) }, : downstairs
38727 Sep 22 23:22:17.087 INFO [0] client ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }
38728 Sep 22 23:22:17.087 INFO [1] received reconcile message
38729 Sep 22 23:22:17.087 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38730 Sep 22 23:22:17.088 INFO [1] client ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }
38731 Sep 22 23:22:17.088 INFO [2] received reconcile message
38732 Sep 22 23:22:17.088 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38733 Sep 22 23:22:17.088 INFO [2] client ExtentReopen { repair_id: ReconciliationId(151), extent_id: 172 }
38734 Sep 22 23:22:17.088 DEBG 151 Reopen extent 172
38735 Sep 22 23:22:17.088 DEBG 151 Reopen extent 172
38736 Sep 22 23:22:17.089 DEBG 151 Reopen extent 172
38737 Sep 22 23:22:17.089 DEBG [2] It's time to notify for 151
38738 Sep 22 23:22:17.089 INFO Completion from [2] id:151 status:true
38739 Sep 22 23:22:17.089 INFO [152/752] Repair commands completed
38740 Sep 22 23:22:17.089 INFO Pop front: ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38741 Sep 22 23:22:17.090 INFO Sent repair work, now wait for resp
38742 Sep 22 23:22:17.090 INFO [0] received reconcile message
38743 Sep 22 23:22:17.090 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38744 Sep 22 23:22:17.090 INFO [0] client ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38745 Sep 22 23:22:17.090 INFO [1] received reconcile message
38746 Sep 22 23:22:17.090 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38747 Sep 22 23:22:17.090 INFO [1] client ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38748 Sep 22 23:22:17.090 INFO [2] received reconcile message
38749 Sep 22 23:22:17.090 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38750 Sep 22 23:22:17.090 INFO [2] client ExtentFlush { repair_id: ReconciliationId(152), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38751 Sep 22 23:22:17.090 DEBG 152 Flush extent 183 with f:2 g:2
38752 Sep 22 23:22:17.090 DEBG Flush just extent 183 with f:2 and g:2
38753 Sep 22 23:22:17.090 DEBG [1] It's time to notify for 152
38754 Sep 22 23:22:17.090 INFO Completion from [1] id:152 status:true
38755 Sep 22 23:22:17.090 INFO [153/752] Repair commands completed
38756 Sep 22 23:22:17.090 INFO Pop front: ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }, state: ClientData([New, New, New]) }
38757 Sep 22 23:22:17.090 INFO Sent repair work, now wait for resp
38758 Sep 22 23:22:17.090 INFO [0] received reconcile message
38759 Sep 22 23:22:17.090 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }, state: ClientData([InProgress, New, New]) }, : downstairs
38760 Sep 22 23:22:17.090 INFO [0] client ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }
38761 Sep 22 23:22:17.090 INFO [1] received reconcile message
38762 Sep 22 23:22:17.090 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38763 Sep 22 23:22:17.090 INFO [1] client ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }
38764 Sep 22 23:22:17.090 INFO [2] received reconcile message
38765 Sep 22 23:22:17.090 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38766 Sep 22 23:22:17.090 INFO [2] client ExtentClose { repair_id: ReconciliationId(153), extent_id: 183 }
38767 Sep 22 23:22:17.090 DEBG 153 Close extent 183
38768 Sep 22 23:22:17.091 DEBG 153 Close extent 183
38769 Sep 22 23:22:17.091 DEBG 153 Close extent 183
38770 Sep 22 23:22:17.091 DEBG [2] It's time to notify for 153
38771 Sep 22 23:22:17.091 INFO Completion from [2] id:153 status:true
38772 Sep 22 23:22:17.091 INFO [154/752] Repair commands completed
38773 Sep 22 23:22:17.092 INFO Pop front: ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38774 Sep 22 23:22:17.092 INFO Sent repair work, now wait for resp
38775 Sep 22 23:22:17.092 INFO [0] received reconcile message
38776 Sep 22 23:22:17.092 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38777 Sep 22 23:22:17.092 INFO [0] client ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38778 Sep 22 23:22:17.092 INFO [0] Sending repair request ReconciliationId(154)
38779 Sep 22 23:22:17.092 INFO [1] received reconcile message
38780 Sep 22 23:22:17.092 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38781 Sep 22 23:22:17.092 INFO [1] client ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38782 Sep 22 23:22:17.092 INFO [1] No action required ReconciliationId(154)
38783 Sep 22 23:22:17.092 INFO [2] received reconcile message
38784 Sep 22 23:22:17.092 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38785 Sep 22 23:22:17.092 INFO [2] client ExtentRepair { repair_id: ReconciliationId(154), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38786 Sep 22 23:22:17.092 INFO [2] No action required ReconciliationId(154)
38787 Sep 22 23:22:17.092 DEBG 154 Repair extent 183 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38788 Sep 22 23:22:17.092 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B7.copy"
38789 Sep 22 23:22:17.156 INFO accepted connection, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38790 Sep 22 23:22:17.156 TRCE incoming request, uri: /extent/183/files, method: GET, req_id: d4a4981d-c65e-4879-beff-a128439ccd9d, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38791 Sep 22 23:22:17.156 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/183/files, method: GET, req_id: d4a4981d-c65e-4879-beff-a128439ccd9d, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38792 Sep 22 23:22:17.157 INFO eid:183 Found repair files: ["0B7", "0B7.db"]
38793 Sep 22 23:22:17.157 TRCE incoming request, uri: /newextent/183/data, method: GET, req_id: 8979f112-da5d-46c8-9341-3c3b269d12dd, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38794 Sep 22 23:22:17.157 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/183/data, method: GET, req_id: 8979f112-da5d-46c8-9341-3c3b269d12dd, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38795 Sep 22 23:22:17.162 TRCE incoming request, uri: /newextent/183/db, method: GET, req_id: 1c39fc90-df86-4d84-9f18-61a7b9923cfa, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38796 Sep 22 23:22:17.162 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/183/db, method: GET, req_id: 1c39fc90-df86-4d84-9f18-61a7b9923cfa, remote_addr: 127.0.0.1:39922, local_addr: 127.0.0.1:52864, task: repair
38797 Sep 22 23:22:17.163 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B7.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B7.replace"
38798 Sep 22 23:22:17.163 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38799 Sep 22 23:22:17.164 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B7.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38800 Sep 22 23:22:17.164 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B7"
38801 Sep 22 23:22:17.165 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B7.db"
38802 Sep 22 23:22:17.165 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38803 Sep 22 23:22:17.165 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B7.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B7.completed"
38804 Sep 22 23:22:17.165 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38805 Sep 22 23:22:17.165 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38806 Sep 22 23:22:17.165 DEBG [0] It's time to notify for 154
38807 Sep 22 23:22:17.165 INFO Completion from [0] id:154 status:true
38808 Sep 22 23:22:17.165 INFO [155/752] Repair commands completed
38809 Sep 22 23:22:17.165 INFO Pop front: ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }, state: ClientData([New, New, New]) }
38810 Sep 22 23:22:17.165 INFO Sent repair work, now wait for resp
38811 Sep 22 23:22:17.165 INFO [0] received reconcile message
38812 Sep 22 23:22:17.165 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }, state: ClientData([InProgress, New, New]) }, : downstairs
38813 Sep 22 23:22:17.165 INFO [0] client ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }
38814 Sep 22 23:22:17.165 INFO [1] received reconcile message
38815 Sep 22 23:22:17.165 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38816 Sep 22 23:22:17.165 INFO [1] client ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }
38817 Sep 22 23:22:17.165 INFO [2] received reconcile message
38818 Sep 22 23:22:17.165 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38819 Sep 22 23:22:17.165 INFO [2] client ExtentReopen { repair_id: ReconciliationId(155), extent_id: 183 }
38820 Sep 22 23:22:17.165 DEBG 155 Reopen extent 183
38821 Sep 22 23:22:17.166 DEBG 155 Reopen extent 183
38822 Sep 22 23:22:17.166 DEBG 155 Reopen extent 183
38823 Sep 22 23:22:17.167 DEBG [2] It's time to notify for 155
38824 Sep 22 23:22:17.167 INFO Completion from [2] id:155 status:true
38825 Sep 22 23:22:17.167 INFO [156/752] Repair commands completed
38826 Sep 22 23:22:17.167 INFO Pop front: ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38827 Sep 22 23:22:17.167 INFO Sent repair work, now wait for resp
38828 Sep 22 23:22:17.167 INFO [0] received reconcile message
38829 Sep 22 23:22:17.167 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38830 Sep 22 23:22:17.167 INFO [0] client ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38831 Sep 22 23:22:17.167 INFO [1] received reconcile message
38832 Sep 22 23:22:17.167 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38833 Sep 22 23:22:17.167 INFO [1] client ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38834 Sep 22 23:22:17.167 INFO [2] received reconcile message
38835 Sep 22 23:22:17.167 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38836 Sep 22 23:22:17.167 INFO [2] client ExtentFlush { repair_id: ReconciliationId(156), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38837 Sep 22 23:22:17.168 DEBG 156 Flush extent 11 with f:2 g:2
38838 Sep 22 23:22:17.168 DEBG Flush just extent 11 with f:2 and g:2
38839 Sep 22 23:22:17.168 DEBG [1] It's time to notify for 156
38840 Sep 22 23:22:17.168 INFO Completion from [1] id:156 status:true
38841 Sep 22 23:22:17.168 INFO [157/752] Repair commands completed
38842 Sep 22 23:22:17.168 INFO Pop front: ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }, state: ClientData([New, New, New]) }
38843 Sep 22 23:22:17.168 INFO Sent repair work, now wait for resp
38844 Sep 22 23:22:17.168 INFO [0] received reconcile message
38845 Sep 22 23:22:17.168 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }, state: ClientData([InProgress, New, New]) }, : downstairs
38846 Sep 22 23:22:17.168 INFO [0] client ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }
38847 Sep 22 23:22:17.168 INFO [1] received reconcile message
38848 Sep 22 23:22:17.168 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38849 Sep 22 23:22:17.168 INFO [1] client ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }
38850 Sep 22 23:22:17.168 INFO [2] received reconcile message
38851 Sep 22 23:22:17.168 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38852 Sep 22 23:22:17.168 INFO [2] client ExtentClose { repair_id: ReconciliationId(157), extent_id: 11 }
38853 Sep 22 23:22:17.168 DEBG 157 Close extent 11
38854 Sep 22 23:22:17.168 DEBG 157 Close extent 11
38855 Sep 22 23:22:17.169 DEBG 157 Close extent 11
38856 Sep 22 23:22:17.169 DEBG [2] It's time to notify for 157
38857 Sep 22 23:22:17.169 INFO Completion from [2] id:157 status:true
38858 Sep 22 23:22:17.169 INFO [158/752] Repair commands completed
38859 Sep 22 23:22:17.169 INFO Pop front: ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38860 Sep 22 23:22:17.169 INFO Sent repair work, now wait for resp
38861 Sep 22 23:22:17.169 INFO [0] received reconcile message
38862 Sep 22 23:22:17.169 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38863 Sep 22 23:22:17.169 INFO [0] client ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38864 Sep 22 23:22:17.169 INFO [0] Sending repair request ReconciliationId(158)
38865 Sep 22 23:22:17.169 INFO [1] received reconcile message
38866 Sep 22 23:22:17.169 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38867 Sep 22 23:22:17.169 INFO [1] client ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38868 Sep 22 23:22:17.169 INFO [1] No action required ReconciliationId(158)
38869 Sep 22 23:22:17.169 INFO [2] received reconcile message
38870 Sep 22 23:22:17.169 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38871 Sep 22 23:22:17.169 INFO [2] client ExtentRepair { repair_id: ReconciliationId(158), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38872 Sep 22 23:22:17.170 INFO [2] No action required ReconciliationId(158)
38873 Sep 22 23:22:17.170 DEBG 158 Repair extent 11 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38874 Sep 22 23:22:17.170 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/00B.copy"
38875 Sep 22 23:22:17.231 INFO accepted connection, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38876 Sep 22 23:22:17.231 TRCE incoming request, uri: /extent/11/files, method: GET, req_id: 38dc2dba-2848-4846-9b18-a27b7fdc59d6, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38877 Sep 22 23:22:17.232 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/11/files, method: GET, req_id: 38dc2dba-2848-4846-9b18-a27b7fdc59d6, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38878 Sep 22 23:22:17.232 INFO eid:11 Found repair files: ["00B", "00B.db"]
38879 Sep 22 23:22:17.232 TRCE incoming request, uri: /newextent/11/data, method: GET, req_id: 9c870808-9285-4281-9866-eb571d7336b3, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38880 Sep 22 23:22:17.233 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/11/data, method: GET, req_id: 9c870808-9285-4281-9866-eb571d7336b3, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38881 Sep 22 23:22:17.237 TRCE incoming request, uri: /newextent/11/db, method: GET, req_id: 5a5f4c22-37de-4257-84f1-25a808e776dd, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38882 Sep 22 23:22:17.237 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/11/db, method: GET, req_id: 5a5f4c22-37de-4257-84f1-25a808e776dd, remote_addr: 127.0.0.1:47163, local_addr: 127.0.0.1:52864, task: repair
38883 Sep 22 23:22:17.239 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/00B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/00B.replace"
38884 Sep 22 23:22:17.239 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38885 Sep 22 23:22:17.239 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/00B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38886 Sep 22 23:22:17.240 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00B"
38887 Sep 22 23:22:17.240 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00B.db"
38888 Sep 22 23:22:17.240 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38889 Sep 22 23:22:17.240 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/00B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/00B.completed"
38890 Sep 22 23:22:17.240 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38891 Sep 22 23:22:17.240 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38892 Sep 22 23:22:17.240 DEBG [0] It's time to notify for 158
38893 Sep 22 23:22:17.240 INFO Completion from [0] id:158 status:true
38894 Sep 22 23:22:17.240 INFO [159/752] Repair commands completed
38895 Sep 22 23:22:17.240 INFO Pop front: ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }, state: ClientData([New, New, New]) }
38896 Sep 22 23:22:17.240 INFO Sent repair work, now wait for resp
38897 Sep 22 23:22:17.240 INFO [0] received reconcile message
38898 Sep 22 23:22:17.240 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }, state: ClientData([InProgress, New, New]) }, : downstairs
38899 Sep 22 23:22:17.240 INFO [0] client ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }
38900 Sep 22 23:22:17.240 INFO [1] received reconcile message
38901 Sep 22 23:22:17.240 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38902 Sep 22 23:22:17.240 INFO [1] client ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }
38903 Sep 22 23:22:17.241 INFO [2] received reconcile message
38904 Sep 22 23:22:17.241 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38905 Sep 22 23:22:17.241 INFO [2] client ExtentReopen { repair_id: ReconciliationId(159), extent_id: 11 }
38906 Sep 22 23:22:17.241 DEBG 159 Reopen extent 11
38907 Sep 22 23:22:17.241 DEBG 159 Reopen extent 11
38908 Sep 22 23:22:17.242 DEBG 159 Reopen extent 11
38909 Sep 22 23:22:17.242 DEBG [2] It's time to notify for 159
38910 Sep 22 23:22:17.242 INFO Completion from [2] id:159 status:true
38911 Sep 22 23:22:17.242 INFO [160/752] Repair commands completed
38912 Sep 22 23:22:17.242 INFO Pop front: ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38913 Sep 22 23:22:17.243 INFO Sent repair work, now wait for resp
38914 Sep 22 23:22:17.243 INFO [0] received reconcile message
38915 Sep 22 23:22:17.243 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38916 Sep 22 23:22:17.243 INFO [0] client ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38917 Sep 22 23:22:17.243 INFO [1] received reconcile message
38918 Sep 22 23:22:17.243 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38919 Sep 22 23:22:17.243 INFO [1] client ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38920 Sep 22 23:22:17.243 INFO [2] received reconcile message
38921 Sep 22 23:22:17.243 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38922 Sep 22 23:22:17.243 INFO [2] client ExtentFlush { repair_id: ReconciliationId(160), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38923 Sep 22 23:22:17.243 DEBG 160 Flush extent 184 with f:2 g:2
38924 Sep 22 23:22:17.243 DEBG Flush just extent 184 with f:2 and g:2
38925 Sep 22 23:22:17.243 DEBG [1] It's time to notify for 160
38926 Sep 22 23:22:17.243 INFO Completion from [1] id:160 status:true
38927 Sep 22 23:22:17.243 INFO [161/752] Repair commands completed
38928 Sep 22 23:22:17.243 INFO Pop front: ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }, state: ClientData([New, New, New]) }
38929 Sep 22 23:22:17.243 INFO Sent repair work, now wait for resp
38930 Sep 22 23:22:17.243 INFO [0] received reconcile message
38931 Sep 22 23:22:17.243 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }, state: ClientData([InProgress, New, New]) }, : downstairs
38932 Sep 22 23:22:17.243 INFO [0] client ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }
38933 Sep 22 23:22:17.243 INFO [1] received reconcile message
38934 Sep 22 23:22:17.243 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38935 Sep 22 23:22:17.243 INFO [1] client ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }
38936 Sep 22 23:22:17.243 INFO [2] received reconcile message
38937 Sep 22 23:22:17.243 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38938 Sep 22 23:22:17.243 INFO [2] client ExtentClose { repair_id: ReconciliationId(161), extent_id: 184 }
38939 Sep 22 23:22:17.243 DEBG 161 Close extent 184
38940 Sep 22 23:22:17.244 DEBG 161 Close extent 184
38941 Sep 22 23:22:17.244 DEBG 161 Close extent 184
38942 Sep 22 23:22:17.244 DEBG [2] It's time to notify for 161
38943 Sep 22 23:22:17.244 INFO Completion from [2] id:161 status:true
38944 Sep 22 23:22:17.244 INFO [162/752] Repair commands completed
38945 Sep 22 23:22:17.244 INFO Pop front: ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38946 Sep 22 23:22:17.244 INFO Sent repair work, now wait for resp
38947 Sep 22 23:22:17.245 INFO [0] received reconcile message
38948 Sep 22 23:22:17.245 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38949 Sep 22 23:22:17.245 INFO [0] client ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38950 Sep 22 23:22:17.245 INFO [0] Sending repair request ReconciliationId(162)
38951 Sep 22 23:22:17.245 INFO [1] received reconcile message
38952 Sep 22 23:22:17.245 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38953 Sep 22 23:22:17.245 INFO [1] client ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38954 Sep 22 23:22:17.245 INFO [1] No action required ReconciliationId(162)
38955 Sep 22 23:22:17.245 INFO [2] received reconcile message
38956 Sep 22 23:22:17.245 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38957 Sep 22 23:22:17.245 INFO [2] client ExtentRepair { repair_id: ReconciliationId(162), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
38958 Sep 22 23:22:17.245 INFO [2] No action required ReconciliationId(162)
38959 Sep 22 23:22:17.245 DEBG 162 Repair extent 184 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
38960 Sep 22 23:22:17.245 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B8.copy"
38961 Sep 22 23:22:17.309 INFO accepted connection, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38962 Sep 22 23:22:17.309 TRCE incoming request, uri: /extent/184/files, method: GET, req_id: 386d5a37-580e-47bd-9e21-a1f4974905fc, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38963 Sep 22 23:22:17.310 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/184/files, method: GET, req_id: 386d5a37-580e-47bd-9e21-a1f4974905fc, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38964 Sep 22 23:22:17.310 INFO eid:184 Found repair files: ["0B8", "0B8.db"]
38965 Sep 22 23:22:17.310 TRCE incoming request, uri: /newextent/184/data, method: GET, req_id: 17616a71-7f55-46d4-bdc6-de4f76590f02, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38966 Sep 22 23:22:17.310 INFO request completed, latency_us: 316, response_code: 200, uri: /newextent/184/data, method: GET, req_id: 17616a71-7f55-46d4-bdc6-de4f76590f02, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38967 Sep 22 23:22:17.315 TRCE incoming request, uri: /newextent/184/db, method: GET, req_id: 7dc18af3-7c2c-40ab-877b-3cf08acc357b, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38968 Sep 22 23:22:17.315 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/184/db, method: GET, req_id: 7dc18af3-7c2c-40ab-877b-3cf08acc357b, remote_addr: 127.0.0.1:55201, local_addr: 127.0.0.1:52864, task: repair
38969 Sep 22 23:22:17.317 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B8.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B8.replace"
38970 Sep 22 23:22:17.317 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38971 Sep 22 23:22:17.317 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B8.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
38972 Sep 22 23:22:17.318 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B8"
38973 Sep 22 23:22:17.318 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B8.db"
38974 Sep 22 23:22:17.318 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38975 Sep 22 23:22:17.318 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B8.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B8.completed"
38976 Sep 22 23:22:17.318 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38977 Sep 22 23:22:17.318 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
38978 Sep 22 23:22:17.318 DEBG [0] It's time to notify for 162
38979 Sep 22 23:22:17.318 INFO Completion from [0] id:162 status:true
38980 Sep 22 23:22:17.318 INFO [163/752] Repair commands completed
38981 Sep 22 23:22:17.318 INFO Pop front: ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }, state: ClientData([New, New, New]) }
38982 Sep 22 23:22:17.318 INFO Sent repair work, now wait for resp
38983 Sep 22 23:22:17.318 INFO [0] received reconcile message
38984 Sep 22 23:22:17.318 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }, state: ClientData([InProgress, New, New]) }, : downstairs
38985 Sep 22 23:22:17.318 INFO [0] client ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }
38986 Sep 22 23:22:17.318 INFO [1] received reconcile message
38987 Sep 22 23:22:17.318 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38988 Sep 22 23:22:17.318 INFO [1] client ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }
38989 Sep 22 23:22:17.318 INFO [2] received reconcile message
38990 Sep 22 23:22:17.318 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38991 Sep 22 23:22:17.318 INFO [2] client ExtentReopen { repair_id: ReconciliationId(163), extent_id: 184 }
38992 Sep 22 23:22:17.319 DEBG 163 Reopen extent 184
38993 Sep 22 23:22:17.319 DEBG 163 Reopen extent 184
38994 Sep 22 23:22:17.320 DEBG 163 Reopen extent 184
38995 Sep 22 23:22:17.320 DEBG [2] It's time to notify for 163
38996 Sep 22 23:22:17.320 INFO Completion from [2] id:163 status:true
38997 Sep 22 23:22:17.320 INFO [164/752] Repair commands completed
38998 Sep 22 23:22:17.320 INFO Pop front: ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38999 Sep 22 23:22:17.320 INFO Sent repair work, now wait for resp
39000 Sep 22 23:22:17.320 INFO [0] received reconcile message
39001 Sep 22 23:22:17.320 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39002 Sep 22 23:22:17.320 INFO [0] client ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39003 Sep 22 23:22:17.320 INFO [1] received reconcile message
39004 Sep 22 23:22:17.320 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39005 Sep 22 23:22:17.320 INFO [1] client ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39006 Sep 22 23:22:17.321 INFO [2] received reconcile message
39007 Sep 22 23:22:17.321 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39008 Sep 22 23:22:17.321 INFO [2] client ExtentFlush { repair_id: ReconciliationId(164), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39009 Sep 22 23:22:17.321 DEBG 164 Flush extent 41 with f:2 g:2
39010 Sep 22 23:22:17.321 DEBG Flush just extent 41 with f:2 and g:2
39011 Sep 22 23:22:17.321 DEBG [1] It's time to notify for 164
39012 Sep 22 23:22:17.321 INFO Completion from [1] id:164 status:true
39013 Sep 22 23:22:17.321 INFO [165/752] Repair commands completed
39014 Sep 22 23:22:17.321 INFO Pop front: ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }, state: ClientData([New, New, New]) }
39015 Sep 22 23:22:17.321 INFO Sent repair work, now wait for resp
39016 Sep 22 23:22:17.321 INFO [0] received reconcile message
39017 Sep 22 23:22:17.321 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }, state: ClientData([InProgress, New, New]) }, : downstairs
39018 Sep 22 23:22:17.321 INFO [0] client ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }
39019 Sep 22 23:22:17.321 INFO [1] received reconcile message
39020 Sep 22 23:22:17.321 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39021 Sep 22 23:22:17.321 INFO [1] client ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }
39022 Sep 22 23:22:17.321 INFO [2] received reconcile message
39023 Sep 22 23:22:17.321 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39024 Sep 22 23:22:17.321 INFO [2] client ExtentClose { repair_id: ReconciliationId(165), extent_id: 41 }
39025 Sep 22 23:22:17.321 DEBG 165 Close extent 41
39026 Sep 22 23:22:17.321 DEBG 165 Close extent 41
39027 Sep 22 23:22:17.322 DEBG 165 Close extent 41
39028 Sep 22 23:22:17.322 DEBG [2] It's time to notify for 165
39029 Sep 22 23:22:17.322 INFO Completion from [2] id:165 status:true
39030 Sep 22 23:22:17.322 INFO [166/752] Repair commands completed
39031 Sep 22 23:22:17.322 INFO Pop front: ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39032 Sep 22 23:22:17.322 INFO Sent repair work, now wait for resp
39033 Sep 22 23:22:17.322 INFO [0] received reconcile message
39034 Sep 22 23:22:17.322 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39035 Sep 22 23:22:17.322 INFO [0] client ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39036 Sep 22 23:22:17.322 INFO [0] Sending repair request ReconciliationId(166)
39037 Sep 22 23:22:17.322 INFO [1] received reconcile message
39038 Sep 22 23:22:17.322 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39039 Sep 22 23:22:17.323 INFO [1] client ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39040 Sep 22 23:22:17.323 INFO [1] No action required ReconciliationId(166)
39041 Sep 22 23:22:17.323 INFO [2] received reconcile message
39042 Sep 22 23:22:17.323 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39043 Sep 22 23:22:17.323 INFO [2] client ExtentRepair { repair_id: ReconciliationId(166), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39044 Sep 22 23:22:17.323 INFO [2] No action required ReconciliationId(166)
39045 Sep 22 23:22:17.323 DEBG 166 Repair extent 41 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39046 Sep 22 23:22:17.323 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/029.copy"
39047 Sep 22 23:22:17.350 DEBG [rc] retire 1076 clears [JobId(1075), JobId(1076)], : downstairs
39048 Sep 22 23:22:17.350 INFO [lossy] skipping 1077
39049 Sep 22 23:22:17.350 INFO [lossy] skipping 1077
39050 Sep 22 23:22:17.356 DEBG Read :1077 deps:[JobId(1076)] res:true
39051 Sep 22 23:22:17.380 DEBG Flush :1078 extent_limit None deps:[JobId(1077), JobId(1076)] res:true f:28 g:1
39052 Sep 22 23:22:17.380 INFO [lossy] sleeping 1 second
39053 Sep 22 23:22:17.386 INFO accepted connection, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39054 Sep 22 23:22:17.386 TRCE incoming request, uri: /extent/41/files, method: GET, req_id: 73c43dc5-1d03-4b46-8dfa-b037d87c4dda, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39055 Sep 22 23:22:17.386 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/41/files, method: GET, req_id: 73c43dc5-1d03-4b46-8dfa-b037d87c4dda, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39056 Sep 22 23:22:17.386 INFO eid:41 Found repair files: ["029", "029.db"]
39057 Sep 22 23:22:17.387 TRCE incoming request, uri: /newextent/41/data, method: GET, req_id: f53995b0-7e9e-4356-a4e9-a4f74c1c59b0, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39058 Sep 22 23:22:17.387 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/41/data, method: GET, req_id: f53995b0-7e9e-4356-a4e9-a4f74c1c59b0, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39059 Sep 22 23:22:17.392 TRCE incoming request, uri: /newextent/41/db, method: GET, req_id: e9d154ec-08fb-491f-bae0-1cbda091d4e1, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39060 Sep 22 23:22:17.392 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/41/db, method: GET, req_id: e9d154ec-08fb-491f-bae0-1cbda091d4e1, remote_addr: 127.0.0.1:43792, local_addr: 127.0.0.1:52864, task: repair
39061 Sep 22 23:22:17.393 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/029.copy" to "/tmp/downstairs-zrMnlo6G/00/000/029.replace"
39062 Sep 22 23:22:17.393 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39063 Sep 22 23:22:17.394 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/029.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39064 Sep 22 23:22:17.394 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/029"
39065 Sep 22 23:22:17.394 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/029.db"
39066 Sep 22 23:22:17.394 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39067 Sep 22 23:22:17.394 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/029.replace" to "/tmp/downstairs-zrMnlo6G/00/000/029.completed"
39068 Sep 22 23:22:17.394 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39069 Sep 22 23:22:17.394 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39070 Sep 22 23:22:17.394 DEBG [0] It's time to notify for 166
39071 Sep 22 23:22:17.395 INFO Completion from [0] id:166 status:true
39072 Sep 22 23:22:17.395 INFO [167/752] Repair commands completed
39073 Sep 22 23:22:17.395 INFO Pop front: ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }, state: ClientData([New, New, New]) }
39074 Sep 22 23:22:17.395 INFO Sent repair work, now wait for resp
39075 Sep 22 23:22:17.395 INFO [0] received reconcile message
39076 Sep 22 23:22:17.395 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }, state: ClientData([InProgress, New, New]) }, : downstairs
39077 Sep 22 23:22:17.395 INFO [0] client ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }
39078 Sep 22 23:22:17.395 INFO [1] received reconcile message
39079 Sep 22 23:22:17.395 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39080 Sep 22 23:22:17.395 INFO [1] client ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }
39081 Sep 22 23:22:17.395 INFO [2] received reconcile message
39082 Sep 22 23:22:17.395 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39083 Sep 22 23:22:17.395 INFO [2] client ExtentReopen { repair_id: ReconciliationId(167), extent_id: 41 }
39084 Sep 22 23:22:17.395 DEBG 167 Reopen extent 41
39085 Sep 22 23:22:17.396 DEBG 167 Reopen extent 41
39086 Sep 22 23:22:17.396 DEBG 167 Reopen extent 41
39087 Sep 22 23:22:17.397 DEBG [2] It's time to notify for 167
39088 Sep 22 23:22:17.397 INFO Completion from [2] id:167 status:true
39089 Sep 22 23:22:17.397 INFO [168/752] Repair commands completed
39090 Sep 22 23:22:17.397 INFO Pop front: ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39091 Sep 22 23:22:17.397 INFO Sent repair work, now wait for resp
39092 Sep 22 23:22:17.397 INFO [0] received reconcile message
39093 Sep 22 23:22:17.397 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39094 Sep 22 23:22:17.397 INFO [0] client ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39095 Sep 22 23:22:17.397 INFO [1] received reconcile message
39096 Sep 22 23:22:17.397 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39097 Sep 22 23:22:17.397 INFO [1] client ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39098 Sep 22 23:22:17.397 INFO [2] received reconcile message
39099 Sep 22 23:22:17.397 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39100 Sep 22 23:22:17.397 INFO [2] client ExtentFlush { repair_id: ReconciliationId(168), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39101 Sep 22 23:22:17.397 DEBG 168 Flush extent 77 with f:2 g:2
39102 Sep 22 23:22:17.397 DEBG Flush just extent 77 with f:2 and g:2
39103 Sep 22 23:22:17.397 DEBG [1] It's time to notify for 168
39104 Sep 22 23:22:17.397 INFO Completion from [1] id:168 status:true
39105 Sep 22 23:22:17.397 INFO [169/752] Repair commands completed
39106 Sep 22 23:22:17.397 INFO Pop front: ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }, state: ClientData([New, New, New]) }
39107 Sep 22 23:22:17.397 INFO Sent repair work, now wait for resp
39108 Sep 22 23:22:17.397 INFO [0] received reconcile message
39109 Sep 22 23:22:17.397 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }, state: ClientData([InProgress, New, New]) }, : downstairs
39110 Sep 22 23:22:17.398 INFO [0] client ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }
39111 Sep 22 23:22:17.398 INFO [1] received reconcile message
39112 Sep 22 23:22:17.398 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39113 Sep 22 23:22:17.398 INFO [1] client ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }
39114 Sep 22 23:22:17.398 INFO [2] received reconcile message
39115 Sep 22 23:22:17.398 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39116 Sep 22 23:22:17.398 INFO [2] client ExtentClose { repair_id: ReconciliationId(169), extent_id: 77 }
39117 Sep 22 23:22:17.398 DEBG 169 Close extent 77
39118 Sep 22 23:22:17.398 DEBG 169 Close extent 77
39119 Sep 22 23:22:17.398 DEBG 169 Close extent 77
39120 Sep 22 23:22:17.399 DEBG [2] It's time to notify for 169
39121 Sep 22 23:22:17.399 INFO Completion from [2] id:169 status:true
39122 Sep 22 23:22:17.399 INFO [170/752] Repair commands completed
39123 Sep 22 23:22:17.399 INFO Pop front: ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39124 Sep 22 23:22:17.399 INFO Sent repair work, now wait for resp
39125 Sep 22 23:22:17.399 INFO [0] received reconcile message
39126 Sep 22 23:22:17.399 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39127 Sep 22 23:22:17.399 INFO [0] client ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39128 Sep 22 23:22:17.399 INFO [0] Sending repair request ReconciliationId(170)
39129 Sep 22 23:22:17.399 INFO [1] received reconcile message
39130 Sep 22 23:22:17.399 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39131 Sep 22 23:22:17.399 INFO [1] client ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39132 Sep 22 23:22:17.399 INFO [1] No action required ReconciliationId(170)
39133 Sep 22 23:22:17.399 INFO [2] received reconcile message
39134 Sep 22 23:22:17.399 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39135 Sep 22 23:22:17.399 INFO [2] client ExtentRepair { repair_id: ReconciliationId(170), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39136 Sep 22 23:22:17.399 INFO [2] No action required ReconciliationId(170)
39137 Sep 22 23:22:17.399 DEBG 170 Repair extent 77 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39138 Sep 22 23:22:17.399 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/04D.copy"
39139 Sep 22 23:22:17.463 INFO accepted connection, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39140 Sep 22 23:22:17.464 TRCE incoming request, uri: /extent/77/files, method: GET, req_id: fccb43e6-ddcb-4895-b0a8-ead3b604cd41, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39141 Sep 22 23:22:17.464 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/77/files, method: GET, req_id: fccb43e6-ddcb-4895-b0a8-ead3b604cd41, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39142 Sep 22 23:22:17.464 INFO eid:77 Found repair files: ["04D", "04D.db"]
39143 Sep 22 23:22:17.464 TRCE incoming request, uri: /newextent/77/data, method: GET, req_id: 8e26ad80-865f-44e4-b4e3-2d6a13dc62f8, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39144 Sep 22 23:22:17.465 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/77/data, method: GET, req_id: 8e26ad80-865f-44e4-b4e3-2d6a13dc62f8, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39145 Sep 22 23:22:17.469 TRCE incoming request, uri: /newextent/77/db, method: GET, req_id: 7a2b16e6-2bbf-43a0-b9c2-0f3a086191b5, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39146 Sep 22 23:22:17.470 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/77/db, method: GET, req_id: 7a2b16e6-2bbf-43a0-b9c2-0f3a086191b5, remote_addr: 127.0.0.1:53871, local_addr: 127.0.0.1:52864, task: repair
39147 Sep 22 23:22:17.471 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/04D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/04D.replace"
39148 Sep 22 23:22:17.471 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39149 Sep 22 23:22:17.472 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/04D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39150 Sep 22 23:22:17.472 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04D"
39151 Sep 22 23:22:17.472 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04D.db"
39152 Sep 22 23:22:17.472 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39153 Sep 22 23:22:17.472 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/04D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/04D.completed"
39154 Sep 22 23:22:17.472 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39155 Sep 22 23:22:17.472 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39156 Sep 22 23:22:17.472 DEBG [0] It's time to notify for 170
39157 Sep 22 23:22:17.473 INFO Completion from [0] id:170 status:true
39158 Sep 22 23:22:17.473 INFO [171/752] Repair commands completed
39159 Sep 22 23:22:17.473 INFO Pop front: ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }, state: ClientData([New, New, New]) }
39160 Sep 22 23:22:17.473 INFO Sent repair work, now wait for resp
39161 Sep 22 23:22:17.473 INFO [0] received reconcile message
39162 Sep 22 23:22:17.473 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }, state: ClientData([InProgress, New, New]) }, : downstairs
39163 Sep 22 23:22:17.473 INFO [0] client ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }
39164 Sep 22 23:22:17.473 INFO [1] received reconcile message
39165 Sep 22 23:22:17.473 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39166 Sep 22 23:22:17.473 INFO [1] client ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }
39167 Sep 22 23:22:17.473 INFO [2] received reconcile message
39168 Sep 22 23:22:17.473 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39169 Sep 22 23:22:17.473 INFO [2] client ExtentReopen { repair_id: ReconciliationId(171), extent_id: 77 }
39170 Sep 22 23:22:17.473 DEBG 171 Reopen extent 77
39171 Sep 22 23:22:17.474 DEBG 171 Reopen extent 77
39172 Sep 22 23:22:17.474 DEBG 171 Reopen extent 77
39173 Sep 22 23:22:17.475 DEBG [2] It's time to notify for 171
39174 Sep 22 23:22:17.475 INFO Completion from [2] id:171 status:true
39175 Sep 22 23:22:17.475 INFO [172/752] Repair commands completed
39176 Sep 22 23:22:17.475 INFO Pop front: ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39177 Sep 22 23:22:17.475 INFO Sent repair work, now wait for resp
39178 Sep 22 23:22:17.475 INFO [0] received reconcile message
39179 Sep 22 23:22:17.475 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39180 Sep 22 23:22:17.475 INFO [0] client ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39181 Sep 22 23:22:17.475 INFO [1] received reconcile message
39182 Sep 22 23:22:17.475 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39183 Sep 22 23:22:17.475 INFO [1] client ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39184 Sep 22 23:22:17.475 INFO [2] received reconcile message
39185 Sep 22 23:22:17.475 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39186 Sep 22 23:22:17.475 INFO [2] client ExtentFlush { repair_id: ReconciliationId(172), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39187 Sep 22 23:22:17.475 DEBG 172 Flush extent 107 with f:2 g:2
39188 Sep 22 23:22:17.475 DEBG Flush just extent 107 with f:2 and g:2
39189 Sep 22 23:22:17.475 DEBG [1] It's time to notify for 172
39190 Sep 22 23:22:17.475 INFO Completion from [1] id:172 status:true
39191 Sep 22 23:22:17.475 INFO [173/752] Repair commands completed
39192 Sep 22 23:22:17.475 INFO Pop front: ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }, state: ClientData([New, New, New]) }
39193 Sep 22 23:22:17.475 INFO Sent repair work, now wait for resp
39194 Sep 22 23:22:17.475 INFO [0] received reconcile message
39195 Sep 22 23:22:17.476 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }, state: ClientData([InProgress, New, New]) }, : downstairs
39196 Sep 22 23:22:17.476 INFO [0] client ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }
39197 Sep 22 23:22:17.476 INFO [1] received reconcile message
39198 Sep 22 23:22:17.476 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39199 Sep 22 23:22:17.476 INFO [1] client ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }
39200 Sep 22 23:22:17.476 INFO [2] received reconcile message
39201 Sep 22 23:22:17.476 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39202 Sep 22 23:22:17.476 INFO [2] client ExtentClose { repair_id: ReconciliationId(173), extent_id: 107 }
39203 Sep 22 23:22:17.476 DEBG 173 Close extent 107
39204 Sep 22 23:22:17.476 DEBG 173 Close extent 107
39205 Sep 22 23:22:17.476 DEBG 173 Close extent 107
39206 Sep 22 23:22:17.477 DEBG [2] It's time to notify for 173
39207 Sep 22 23:22:17.477 INFO Completion from [2] id:173 status:true
39208 Sep 22 23:22:17.477 INFO [174/752] Repair commands completed
39209 Sep 22 23:22:17.477 INFO Pop front: ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39210 Sep 22 23:22:17.477 INFO Sent repair work, now wait for resp
39211 Sep 22 23:22:17.477 INFO [0] received reconcile message
39212 Sep 22 23:22:17.477 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39213 Sep 22 23:22:17.477 INFO [0] client ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39214 Sep 22 23:22:17.477 INFO [0] Sending repair request ReconciliationId(174)
39215 Sep 22 23:22:17.477 INFO [1] received reconcile message
39216 Sep 22 23:22:17.477 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39217 Sep 22 23:22:17.477 INFO [1] client ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39218 Sep 22 23:22:17.477 INFO [1] No action required ReconciliationId(174)
39219 Sep 22 23:22:17.477 INFO [2] received reconcile message
39220 Sep 22 23:22:17.477 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39221 Sep 22 23:22:17.477 INFO [2] client ExtentRepair { repair_id: ReconciliationId(174), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39222 Sep 22 23:22:17.477 INFO [2] No action required ReconciliationId(174)
39223 Sep 22 23:22:17.477 DEBG 174 Repair extent 107 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39224 Sep 22 23:22:17.477 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/06B.copy"
39225 Sep 22 23:22:17.544 INFO accepted connection, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39226 Sep 22 23:22:17.544 TRCE incoming request, uri: /extent/107/files, method: GET, req_id: 437d39af-0e2e-4906-869b-239637735a2e, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39227 Sep 22 23:22:17.544 INFO request completed, latency_us: 280, response_code: 200, uri: /extent/107/files, method: GET, req_id: 437d39af-0e2e-4906-869b-239637735a2e, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39228 Sep 22 23:22:17.545 INFO eid:107 Found repair files: ["06B", "06B.db"]
39229 Sep 22 23:22:17.545 TRCE incoming request, uri: /newextent/107/data, method: GET, req_id: 02cda503-d1e3-42a4-b6b5-4c97685a8658, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39230 Sep 22 23:22:17.545 INFO request completed, latency_us: 367, response_code: 200, uri: /newextent/107/data, method: GET, req_id: 02cda503-d1e3-42a4-b6b5-4c97685a8658, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39231 Sep 22 23:22:17.550 TRCE incoming request, uri: /newextent/107/db, method: GET, req_id: d687edd0-129d-497d-ab9e-21dd3002cab1, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39232 Sep 22 23:22:17.551 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/107/db, method: GET, req_id: d687edd0-129d-497d-ab9e-21dd3002cab1, remote_addr: 127.0.0.1:34959, local_addr: 127.0.0.1:52864, task: repair
39233 Sep 22 23:22:17.552 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/06B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/06B.replace"
39234 Sep 22 23:22:17.552 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39235 Sep 22 23:22:17.553 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/06B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39236 Sep 22 23:22:17.554 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06B"
39237 Sep 22 23:22:17.554 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06B.db"
39238 Sep 22 23:22:17.554 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39239 Sep 22 23:22:17.554 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/06B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/06B.completed"
39240 Sep 22 23:22:17.554 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39241 Sep 22 23:22:17.554 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39242 Sep 22 23:22:17.554 DEBG [0] It's time to notify for 174
39243 Sep 22 23:22:17.554 INFO Completion from [0] id:174 status:true
39244 Sep 22 23:22:17.554 INFO [175/752] Repair commands completed
39245 Sep 22 23:22:17.554 INFO Pop front: ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }, state: ClientData([New, New, New]) }
39246 Sep 22 23:22:17.554 INFO Sent repair work, now wait for resp
39247 Sep 22 23:22:17.554 INFO [0] received reconcile message
39248 Sep 22 23:22:17.554 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }, state: ClientData([InProgress, New, New]) }, : downstairs
39249 Sep 22 23:22:17.554 INFO [0] client ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }
39250 Sep 22 23:22:17.554 INFO [1] received reconcile message
39251 Sep 22 23:22:17.555 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39252 Sep 22 23:22:17.555 INFO [1] client ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }
39253 Sep 22 23:22:17.555 INFO [2] received reconcile message
39254 Sep 22 23:22:17.555 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39255 Sep 22 23:22:17.555 INFO [2] client ExtentReopen { repair_id: ReconciliationId(175), extent_id: 107 }
39256 Sep 22 23:22:17.555 DEBG 175 Reopen extent 107
39257 Sep 22 23:22:17.556 DEBG 175 Reopen extent 107
39258 Sep 22 23:22:17.556 DEBG 175 Reopen extent 107
39259 Sep 22 23:22:17.557 DEBG [2] It's time to notify for 175
39260 Sep 22 23:22:17.557 INFO Completion from [2] id:175 status:true
39261 Sep 22 23:22:17.557 INFO [176/752] Repair commands completed
39262 Sep 22 23:22:17.557 INFO Pop front: ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39263 Sep 22 23:22:17.557 INFO Sent repair work, now wait for resp
39264 Sep 22 23:22:17.557 INFO [0] received reconcile message
39265 Sep 22 23:22:17.557 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39266 Sep 22 23:22:17.557 INFO [0] client ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39267 Sep 22 23:22:17.557 INFO [1] received reconcile message
39268 Sep 22 23:22:17.557 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39269 Sep 22 23:22:17.557 INFO [1] client ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39270 Sep 22 23:22:17.557 INFO [2] received reconcile message
39271 Sep 22 23:22:17.557 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39272 Sep 22 23:22:17.557 INFO [2] client ExtentFlush { repair_id: ReconciliationId(176), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39273 Sep 22 23:22:17.557 DEBG 176 Flush extent 64 with f:2 g:2
39274 Sep 22 23:22:17.557 DEBG Flush just extent 64 with f:2 and g:2
39275 Sep 22 23:22:17.557 DEBG [1] It's time to notify for 176
39276 Sep 22 23:22:17.557 INFO Completion from [1] id:176 status:true
39277 Sep 22 23:22:17.557 INFO [177/752] Repair commands completed
39278 Sep 22 23:22:17.557 INFO Pop front: ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }, state: ClientData([New, New, New]) }
39279 Sep 22 23:22:17.557 INFO Sent repair work, now wait for resp
39280 Sep 22 23:22:17.557 INFO [0] received reconcile message
39281 Sep 22 23:22:17.557 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }, state: ClientData([InProgress, New, New]) }, : downstairs
39282 Sep 22 23:22:17.557 INFO [0] client ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }
39283 Sep 22 23:22:17.558 INFO [1] received reconcile message
39284 Sep 22 23:22:17.558 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39285 Sep 22 23:22:17.558 INFO [1] client ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }
39286 Sep 22 23:22:17.558 INFO [2] received reconcile message
39287 Sep 22 23:22:17.558 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39288 Sep 22 23:22:17.558 INFO [2] client ExtentClose { repair_id: ReconciliationId(177), extent_id: 64 }
39289 Sep 22 23:22:17.558 DEBG 177 Close extent 64
39290 Sep 22 23:22:17.558 DEBG 177 Close extent 64
39291 Sep 22 23:22:17.558 DEBG 177 Close extent 64
39292 Sep 22 23:22:17.559 DEBG [2] It's time to notify for 177
39293 Sep 22 23:22:17.559 INFO Completion from [2] id:177 status:true
39294 Sep 22 23:22:17.559 INFO [178/752] Repair commands completed
39295 Sep 22 23:22:17.559 INFO Pop front: ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39296 Sep 22 23:22:17.559 INFO Sent repair work, now wait for resp
39297 Sep 22 23:22:17.559 INFO [0] received reconcile message
39298 Sep 22 23:22:17.559 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39299 Sep 22 23:22:17.559 INFO [0] client ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39300 Sep 22 23:22:17.559 INFO [0] Sending repair request ReconciliationId(178)
39301 Sep 22 23:22:17.559 INFO [1] received reconcile message
39302 Sep 22 23:22:17.559 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39303 Sep 22 23:22:17.559 INFO [1] client ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39304 Sep 22 23:22:17.559 INFO [1] No action required ReconciliationId(178)
39305 Sep 22 23:22:17.559 INFO [2] received reconcile message
39306 Sep 22 23:22:17.559 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39307 Sep 22 23:22:17.559 INFO [2] client ExtentRepair { repair_id: ReconciliationId(178), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39308 Sep 22 23:22:17.559 INFO [2] No action required ReconciliationId(178)
39309 Sep 22 23:22:17.559 DEBG 178 Repair extent 64 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39310 Sep 22 23:22:17.559 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/040.copy"
39311 Sep 22 23:22:17.622 INFO accepted connection, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39312 Sep 22 23:22:17.622 TRCE incoming request, uri: /extent/64/files, method: GET, req_id: 39b232b0-a0e4-4484-a42e-264c8fd1ab91, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39313 Sep 22 23:22:17.622 INFO request completed, latency_us: 252, response_code: 200, uri: /extent/64/files, method: GET, req_id: 39b232b0-a0e4-4484-a42e-264c8fd1ab91, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39314 Sep 22 23:22:17.623 INFO eid:64 Found repair files: ["040", "040.db"]
39315 Sep 22 23:22:17.623 TRCE incoming request, uri: /newextent/64/data, method: GET, req_id: 9f3b4fe9-4a06-42c8-9149-947d846f8b64, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39316 Sep 22 23:22:17.623 INFO request completed, latency_us: 341, response_code: 200, uri: /newextent/64/data, method: GET, req_id: 9f3b4fe9-4a06-42c8-9149-947d846f8b64, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39317 Sep 22 23:22:17.628 TRCE incoming request, uri: /newextent/64/db, method: GET, req_id: 67d4f62a-358c-4da8-9fbc-258047443558, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39318 Sep 22 23:22:17.629 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/64/db, method: GET, req_id: 67d4f62a-358c-4da8-9fbc-258047443558, remote_addr: 127.0.0.1:64907, local_addr: 127.0.0.1:52864, task: repair
39319 Sep 22 23:22:17.630 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/040.copy" to "/tmp/downstairs-zrMnlo6G/00/000/040.replace"
39320 Sep 22 23:22:17.630 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39321 Sep 22 23:22:17.630 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/040.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39322 Sep 22 23:22:17.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/040"
39323 Sep 22 23:22:17.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/040.db"
39324 Sep 22 23:22:17.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39325 Sep 22 23:22:17.631 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/040.replace" to "/tmp/downstairs-zrMnlo6G/00/000/040.completed"
39326 Sep 22 23:22:17.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39327 Sep 22 23:22:17.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39328 Sep 22 23:22:17.631 DEBG [0] It's time to notify for 178
39329 Sep 22 23:22:17.631 INFO Completion from [0] id:178 status:true
39330 Sep 22 23:22:17.631 INFO [179/752] Repair commands completed
39331 Sep 22 23:22:17.631 INFO Pop front: ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }, state: ClientData([New, New, New]) }
39332 Sep 22 23:22:17.631 INFO Sent repair work, now wait for resp
39333 Sep 22 23:22:17.631 INFO [0] received reconcile message
39334 Sep 22 23:22:17.631 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }, state: ClientData([InProgress, New, New]) }, : downstairs
39335 Sep 22 23:22:17.631 INFO [0] client ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }
39336 Sep 22 23:22:17.631 INFO [1] received reconcile message
39337 Sep 22 23:22:17.631 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39338 Sep 22 23:22:17.631 INFO [1] client ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }
39339 Sep 22 23:22:17.632 INFO [2] received reconcile message
39340 Sep 22 23:22:17.632 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39341 Sep 22 23:22:17.632 INFO [2] client ExtentReopen { repair_id: ReconciliationId(179), extent_id: 64 }
39342 Sep 22 23:22:17.632 DEBG 179 Reopen extent 64
39343 Sep 22 23:22:17.632 DEBG 179 Reopen extent 64
39344 Sep 22 23:22:17.633 DEBG 179 Reopen extent 64
39345 Sep 22 23:22:17.633 DEBG [2] It's time to notify for 179
39346 Sep 22 23:22:17.634 INFO Completion from [2] id:179 status:true
39347 Sep 22 23:22:17.634 INFO [180/752] Repair commands completed
39348 Sep 22 23:22:17.634 INFO Pop front: ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39349 Sep 22 23:22:17.634 INFO Sent repair work, now wait for resp
39350 Sep 22 23:22:17.634 INFO [0] received reconcile message
39351 Sep 22 23:22:17.634 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39352 Sep 22 23:22:17.634 INFO [0] client ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39353 Sep 22 23:22:17.634 INFO [1] received reconcile message
39354 Sep 22 23:22:17.634 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39355 Sep 22 23:22:17.634 INFO [1] client ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39356 Sep 22 23:22:17.634 INFO [2] received reconcile message
39357 Sep 22 23:22:17.634 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39358 Sep 22 23:22:17.634 INFO [2] client ExtentFlush { repair_id: ReconciliationId(180), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39359 Sep 22 23:22:17.634 DEBG 180 Flush extent 171 with f:2 g:2
39360 Sep 22 23:22:17.634 DEBG Flush just extent 171 with f:2 and g:2
39361 Sep 22 23:22:17.634 DEBG [1] It's time to notify for 180
39362 Sep 22 23:22:17.634 INFO Completion from [1] id:180 status:true
39363 Sep 22 23:22:17.634 INFO [181/752] Repair commands completed
39364 Sep 22 23:22:17.634 INFO Pop front: ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }, state: ClientData([New, New, New]) }
39365 Sep 22 23:22:17.634 INFO Sent repair work, now wait for resp
39366 Sep 22 23:22:17.634 INFO [0] received reconcile message
39367 Sep 22 23:22:17.634 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }, state: ClientData([InProgress, New, New]) }, : downstairs
39368 Sep 22 23:22:17.634 INFO [0] client ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }
39369 Sep 22 23:22:17.634 INFO [1] received reconcile message
39370 Sep 22 23:22:17.634 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39371 Sep 22 23:22:17.634 INFO [1] client ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }
39372 Sep 22 23:22:17.634 INFO [2] received reconcile message
39373 Sep 22 23:22:17.634 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39374 Sep 22 23:22:17.634 INFO [2] client ExtentClose { repair_id: ReconciliationId(181), extent_id: 171 }
39375 Sep 22 23:22:17.635 DEBG 181 Close extent 171
39376 Sep 22 23:22:17.635 DEBG 181 Close extent 171
39377 Sep 22 23:22:17.635 DEBG 181 Close extent 171
39378 Sep 22 23:22:17.635 DEBG [2] It's time to notify for 181
39379 Sep 22 23:22:17.636 INFO Completion from [2] id:181 status:true
39380 Sep 22 23:22:17.636 INFO [182/752] Repair commands completed
39381 Sep 22 23:22:17.636 INFO Pop front: ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39382 Sep 22 23:22:17.636 INFO Sent repair work, now wait for resp
39383 Sep 22 23:22:17.636 INFO [0] received reconcile message
39384 Sep 22 23:22:17.636 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39385 Sep 22 23:22:17.636 INFO [0] client ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39386 Sep 22 23:22:17.636 INFO [0] Sending repair request ReconciliationId(182)
39387 Sep 22 23:22:17.636 INFO [1] received reconcile message
39388 Sep 22 23:22:17.636 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39389 Sep 22 23:22:17.636 INFO [1] client ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39390 Sep 22 23:22:17.636 INFO [1] No action required ReconciliationId(182)
39391 Sep 22 23:22:17.636 INFO [2] received reconcile message
39392 Sep 22 23:22:17.636 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39393 Sep 22 23:22:17.636 INFO [2] client ExtentRepair { repair_id: ReconciliationId(182), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39394 Sep 22 23:22:17.636 INFO [2] No action required ReconciliationId(182)
39395 Sep 22 23:22:17.636 DEBG 182 Repair extent 171 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39396 Sep 22 23:22:17.636 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0AB.copy"
39397 Sep 22 23:22:17.697 INFO accepted connection, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39398 Sep 22 23:22:17.698 TRCE incoming request, uri: /extent/171/files, method: GET, req_id: ece9d917-df70-4a82-b1a0-00cb79bd2cde, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39399 Sep 22 23:22:17.698 INFO request completed, latency_us: 205, response_code: 200, uri: /extent/171/files, method: GET, req_id: ece9d917-df70-4a82-b1a0-00cb79bd2cde, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39400 Sep 22 23:22:17.698 INFO eid:171 Found repair files: ["0AB", "0AB.db"]
39401 Sep 22 23:22:17.698 TRCE incoming request, uri: /newextent/171/data, method: GET, req_id: d7789424-3ba1-4ef2-93e2-d1c874159df6, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39402 Sep 22 23:22:17.699 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/171/data, method: GET, req_id: d7789424-3ba1-4ef2-93e2-d1c874159df6, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39403 Sep 22 23:22:17.704 TRCE incoming request, uri: /newextent/171/db, method: GET, req_id: 8807af28-ee9c-42c4-9ffe-3e41f34a06d2, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39404 Sep 22 23:22:17.704 INFO request completed, latency_us: 289, response_code: 200, uri: /newextent/171/db, method: GET, req_id: 8807af28-ee9c-42c4-9ffe-3e41f34a06d2, remote_addr: 127.0.0.1:33985, local_addr: 127.0.0.1:52864, task: repair
39405 Sep 22 23:22:17.705 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0AB.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0AB.replace"
39406 Sep 22 23:22:17.705 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39407 Sep 22 23:22:17.706 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0AB.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39408 Sep 22 23:22:17.706 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AB"
39409 Sep 22 23:22:17.706 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AB.db"
39410 Sep 22 23:22:17.706 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39411 Sep 22 23:22:17.706 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0AB.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0AB.completed"
39412 Sep 22 23:22:17.706 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39413 Sep 22 23:22:17.706 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39414 Sep 22 23:22:17.706 DEBG [0] It's time to notify for 182
39415 Sep 22 23:22:17.707 INFO Completion from [0] id:182 status:true
39416 Sep 22 23:22:17.707 INFO [183/752] Repair commands completed
39417 Sep 22 23:22:17.707 INFO Pop front: ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }, state: ClientData([New, New, New]) }
39418 Sep 22 23:22:17.707 INFO Sent repair work, now wait for resp
39419 Sep 22 23:22:17.707 INFO [0] received reconcile message
39420 Sep 22 23:22:17.707 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }, state: ClientData([InProgress, New, New]) }, : downstairs
39421 Sep 22 23:22:17.707 INFO [0] client ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }
39422 Sep 22 23:22:17.707 INFO [1] received reconcile message
39423 Sep 22 23:22:17.707 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39424 Sep 22 23:22:17.707 INFO [1] client ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }
39425 Sep 22 23:22:17.707 INFO [2] received reconcile message
39426 Sep 22 23:22:17.707 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39427 Sep 22 23:22:17.707 INFO [2] client ExtentReopen { repair_id: ReconciliationId(183), extent_id: 171 }
39428 Sep 22 23:22:17.707 DEBG 183 Reopen extent 171
39429 Sep 22 23:22:17.708 DEBG 183 Reopen extent 171
39430 Sep 22 23:22:17.708 DEBG 183 Reopen extent 171
39431 Sep 22 23:22:17.709 DEBG [2] It's time to notify for 183
39432 Sep 22 23:22:17.709 INFO Completion from [2] id:183 status:true
39433 Sep 22 23:22:17.709 INFO [184/752] Repair commands completed
39434 Sep 22 23:22:17.709 INFO Pop front: ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39435 Sep 22 23:22:17.709 INFO Sent repair work, now wait for resp
39436 Sep 22 23:22:17.709 INFO [0] received reconcile message
39437 Sep 22 23:22:17.709 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39438 Sep 22 23:22:17.709 INFO [0] client ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39439 Sep 22 23:22:17.709 INFO [1] received reconcile message
39440 Sep 22 23:22:17.709 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39441 Sep 22 23:22:17.709 INFO [1] client ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39442 Sep 22 23:22:17.709 INFO [2] received reconcile message
39443 Sep 22 23:22:17.709 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39444 Sep 22 23:22:17.709 INFO [2] client ExtentFlush { repair_id: ReconciliationId(184), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39445 Sep 22 23:22:17.709 DEBG 184 Flush extent 165 with f:2 g:2
39446 Sep 22 23:22:17.709 DEBG Flush just extent 165 with f:2 and g:2
39447 Sep 22 23:22:17.709 DEBG [1] It's time to notify for 184
39448 Sep 22 23:22:17.709 INFO Completion from [1] id:184 status:true
39449 Sep 22 23:22:17.709 INFO [185/752] Repair commands completed
39450 Sep 22 23:22:17.709 INFO Pop front: ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }, state: ClientData([New, New, New]) }
39451 Sep 22 23:22:17.709 INFO Sent repair work, now wait for resp
39452 Sep 22 23:22:17.709 INFO [0] received reconcile message
39453 Sep 22 23:22:17.709 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }, state: ClientData([InProgress, New, New]) }, : downstairs
39454 Sep 22 23:22:17.709 INFO [0] client ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }
39455 Sep 22 23:22:17.709 INFO [1] received reconcile message
39456 Sep 22 23:22:17.709 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39457 Sep 22 23:22:17.709 INFO [1] client ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }
39458 Sep 22 23:22:17.709 INFO [2] received reconcile message
39459 Sep 22 23:22:17.710 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39460 Sep 22 23:22:17.710 INFO [2] client ExtentClose { repair_id: ReconciliationId(185), extent_id: 165 }
39461 Sep 22 23:22:17.710 DEBG 185 Close extent 165
39462 Sep 22 23:22:17.710 DEBG 185 Close extent 165
39463 Sep 22 23:22:17.710 DEBG 185 Close extent 165
39464 Sep 22 23:22:17.711 DEBG [2] It's time to notify for 185
39465 Sep 22 23:22:17.711 INFO Completion from [2] id:185 status:true
39466 Sep 22 23:22:17.711 INFO [186/752] Repair commands completed
39467 Sep 22 23:22:17.711 INFO Pop front: ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39468 Sep 22 23:22:17.711 INFO Sent repair work, now wait for resp
39469 Sep 22 23:22:17.711 INFO [0] received reconcile message
39470 Sep 22 23:22:17.711 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39471 Sep 22 23:22:17.711 INFO [0] client ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39472 Sep 22 23:22:17.711 INFO [0] Sending repair request ReconciliationId(186)
39473 Sep 22 23:22:17.711 INFO [1] received reconcile message
39474 Sep 22 23:22:17.711 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39475 Sep 22 23:22:17.711 INFO [1] client ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39476 Sep 22 23:22:17.711 INFO [1] No action required ReconciliationId(186)
39477 Sep 22 23:22:17.711 INFO [2] received reconcile message
39478 Sep 22 23:22:17.711 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39479 Sep 22 23:22:17.711 INFO [2] client ExtentRepair { repair_id: ReconciliationId(186), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39480 Sep 22 23:22:17.711 INFO [2] No action required ReconciliationId(186)
39481 Sep 22 23:22:17.711 DEBG 186 Repair extent 165 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39482 Sep 22 23:22:17.711 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A5.copy"
39483 Sep 22 23:22:17.759 DEBG [2] Read AckReady 1077, : downstairs
39484 Sep 22 23:22:17.760 DEBG up_ds_listen was notified
39485 Sep 22 23:22:17.760 DEBG up_ds_listen process 1077
39486 Sep 22 23:22:17.760 DEBG [A] ack job 1077:78, : downstairs
39487 Sep 22 23:22:17.775 INFO accepted connection, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39488 Sep 22 23:22:17.776 TRCE incoming request, uri: /extent/165/files, method: GET, req_id: 52ea5135-a5df-4d57-9ef4-d39aa05c4dfe, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39489 Sep 22 23:22:17.776 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/165/files, method: GET, req_id: 52ea5135-a5df-4d57-9ef4-d39aa05c4dfe, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39490 Sep 22 23:22:17.776 INFO eid:165 Found repair files: ["0A5", "0A5.db"]
39491 Sep 22 23:22:17.776 TRCE incoming request, uri: /newextent/165/data, method: GET, req_id: 2c1d08f6-50fb-4c17-b32e-7c94d4562d1e, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39492 Sep 22 23:22:17.777 INFO request completed, latency_us: 309, response_code: 200, uri: /newextent/165/data, method: GET, req_id: 2c1d08f6-50fb-4c17-b32e-7c94d4562d1e, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39493 Sep 22 23:22:17.782 TRCE incoming request, uri: /newextent/165/db, method: GET, req_id: 42d18f09-19f9-49ac-b6b9-96897066a57b, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39494 Sep 22 23:22:17.782 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/165/db, method: GET, req_id: 42d18f09-19f9-49ac-b6b9-96897066a57b, remote_addr: 127.0.0.1:61309, local_addr: 127.0.0.1:52864, task: repair
39495 Sep 22 23:22:17.783 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A5.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A5.replace"
39496 Sep 22 23:22:17.783 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39497 Sep 22 23:22:17.784 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A5.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39498 Sep 22 23:22:17.784 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A5"
39499 Sep 22 23:22:17.784 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A5.db"
39500 Sep 22 23:22:17.784 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39501 Sep 22 23:22:17.784 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A5.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A5.completed"
39502 Sep 22 23:22:17.784 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39503 Sep 22 23:22:17.784 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39504 Sep 22 23:22:17.785 DEBG [0] It's time to notify for 186
39505 Sep 22 23:22:17.785 INFO Completion from [0] id:186 status:true
39506 Sep 22 23:22:17.785 INFO [187/752] Repair commands completed
39507 Sep 22 23:22:17.785 INFO Pop front: ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }, state: ClientData([New, New, New]) }
39508 Sep 22 23:22:17.785 INFO Sent repair work, now wait for resp
39509 Sep 22 23:22:17.785 INFO [0] received reconcile message
39510 Sep 22 23:22:17.785 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }, state: ClientData([InProgress, New, New]) }, : downstairs
39511 Sep 22 23:22:17.785 INFO [0] client ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }
39512 Sep 22 23:22:17.785 INFO [1] received reconcile message
39513 Sep 22 23:22:17.785 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39514 Sep 22 23:22:17.785 INFO [1] client ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }
39515 Sep 22 23:22:17.785 INFO [2] received reconcile message
39516 Sep 22 23:22:17.785 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39517 Sep 22 23:22:17.785 INFO [2] client ExtentReopen { repair_id: ReconciliationId(187), extent_id: 165 }
39518 Sep 22 23:22:17.785 DEBG 187 Reopen extent 165
39519 Sep 22 23:22:17.786 DEBG 187 Reopen extent 165
39520 Sep 22 23:22:17.786 DEBG 187 Reopen extent 165
39521 Sep 22 23:22:17.787 DEBG [2] It's time to notify for 187
39522 Sep 22 23:22:17.787 INFO Completion from [2] id:187 status:true
39523 Sep 22 23:22:17.787 INFO [188/752] Repair commands completed
39524 Sep 22 23:22:17.787 INFO Pop front: ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39525 Sep 22 23:22:17.787 INFO Sent repair work, now wait for resp
39526 Sep 22 23:22:17.787 INFO [0] received reconcile message
39527 Sep 22 23:22:17.787 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39528 Sep 22 23:22:17.787 INFO [0] client ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39529 Sep 22 23:22:17.787 INFO [1] received reconcile message
39530 Sep 22 23:22:17.787 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39531 Sep 22 23:22:17.787 INFO [1] client ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39532 Sep 22 23:22:17.787 INFO [2] received reconcile message
39533 Sep 22 23:22:17.787 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39534 Sep 22 23:22:17.787 INFO [2] client ExtentFlush { repair_id: ReconciliationId(188), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39535 Sep 22 23:22:17.787 DEBG 188 Flush extent 118 with f:2 g:2
39536 Sep 22 23:22:17.787 DEBG Flush just extent 118 with f:2 and g:2
39537 Sep 22 23:22:17.787 DEBG [1] It's time to notify for 188
39538 Sep 22 23:22:17.787 INFO Completion from [1] id:188 status:true
39539 Sep 22 23:22:17.787 INFO [189/752] Repair commands completed
39540 Sep 22 23:22:17.787 INFO Pop front: ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }, state: ClientData([New, New, New]) }
39541 Sep 22 23:22:17.787 INFO Sent repair work, now wait for resp
39542 Sep 22 23:22:17.787 INFO [0] received reconcile message
39543 Sep 22 23:22:17.787 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }, state: ClientData([InProgress, New, New]) }, : downstairs
39544 Sep 22 23:22:17.787 INFO [0] client ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }
39545 Sep 22 23:22:17.787 INFO [1] received reconcile message
39546 Sep 22 23:22:17.788 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39547 Sep 22 23:22:17.788 INFO [1] client ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }
39548 Sep 22 23:22:17.788 INFO [2] received reconcile message
39549 Sep 22 23:22:17.788 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39550 Sep 22 23:22:17.788 INFO [2] client ExtentClose { repair_id: ReconciliationId(189), extent_id: 118 }
39551 Sep 22 23:22:17.788 DEBG 189 Close extent 118
39552 Sep 22 23:22:17.788 DEBG 189 Close extent 118
39553 Sep 22 23:22:17.788 DEBG 189 Close extent 118
39554 Sep 22 23:22:17.789 DEBG [2] It's time to notify for 189
39555 Sep 22 23:22:17.789 INFO Completion from [2] id:189 status:true
39556 Sep 22 23:22:17.789 INFO [190/752] Repair commands completed
39557 Sep 22 23:22:17.789 INFO Pop front: ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39558 Sep 22 23:22:17.789 INFO Sent repair work, now wait for resp
39559 Sep 22 23:22:17.789 INFO [0] received reconcile message
39560 Sep 22 23:22:17.789 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39561 Sep 22 23:22:17.789 INFO [0] client ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39562 Sep 22 23:22:17.789 INFO [0] Sending repair request ReconciliationId(190)
39563 Sep 22 23:22:17.789 INFO [1] received reconcile message
39564 Sep 22 23:22:17.789 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39565 Sep 22 23:22:17.789 INFO [1] client ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39566 Sep 22 23:22:17.789 INFO [1] No action required ReconciliationId(190)
39567 Sep 22 23:22:17.789 INFO [2] received reconcile message
39568 Sep 22 23:22:17.789 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39569 Sep 22 23:22:17.789 INFO [2] client ExtentRepair { repair_id: ReconciliationId(190), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39570 Sep 22 23:22:17.789 INFO [2] No action required ReconciliationId(190)
39571 Sep 22 23:22:17.789 DEBG 190 Repair extent 118 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39572 Sep 22 23:22:17.789 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/076.copy"
39573 Sep 22 23:22:17.813 DEBG up_ds_listen checked 1 jobs, back to waiting
39574 Sep 22 23:22:17.816 DEBG Flush :1078 extent_limit None deps:[JobId(1077), JobId(1076)] res:true f:28 g:1
39575 Sep 22 23:22:17.816 INFO [lossy] sleeping 1 second
39576 Sep 22 23:22:17.822 DEBG Read :1077 deps:[JobId(1076)] res:true
39577 Sep 22 23:22:17.852 INFO accepted connection, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39578 Sep 22 23:22:17.852 TRCE incoming request, uri: /extent/118/files, method: GET, req_id: 64194861-c875-4a7c-b46b-29e81635475a, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39579 Sep 22 23:22:17.852 INFO request completed, latency_us: 239, response_code: 200, uri: /extent/118/files, method: GET, req_id: 64194861-c875-4a7c-b46b-29e81635475a, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39580 Sep 22 23:22:17.853 INFO eid:118 Found repair files: ["076", "076.db"]
39581 Sep 22 23:22:17.853 TRCE incoming request, uri: /newextent/118/data, method: GET, req_id: 6571e296-94b5-4a8e-aa64-d74774216b87, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39582 Sep 22 23:22:17.853 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/118/data, method: GET, req_id: 6571e296-94b5-4a8e-aa64-d74774216b87, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39583 Sep 22 23:22:17.858 TRCE incoming request, uri: /newextent/118/db, method: GET, req_id: c5de7095-c9fe-4212-948c-6f3ddc4c74bf, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39584 Sep 22 23:22:17.859 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/118/db, method: GET, req_id: c5de7095-c9fe-4212-948c-6f3ddc4c74bf, remote_addr: 127.0.0.1:42860, local_addr: 127.0.0.1:52864, task: repair
39585 Sep 22 23:22:17.860 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/076.copy" to "/tmp/downstairs-zrMnlo6G/00/000/076.replace"
39586 Sep 22 23:22:17.860 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39587 Sep 22 23:22:17.861 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/076.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39588 Sep 22 23:22:17.861 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/076"
39589 Sep 22 23:22:17.861 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/076.db"
39590 Sep 22 23:22:17.861 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39591 Sep 22 23:22:17.861 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/076.replace" to "/tmp/downstairs-zrMnlo6G/00/000/076.completed"
39592 Sep 22 23:22:17.861 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39593 Sep 22 23:22:17.861 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39594 Sep 22 23:22:17.861 DEBG [0] It's time to notify for 190
39595 Sep 22 23:22:17.862 INFO Completion from [0] id:190 status:true
39596 Sep 22 23:22:17.862 INFO [191/752] Repair commands completed
39597 Sep 22 23:22:17.862 INFO Pop front: ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }, state: ClientData([New, New, New]) }
39598 Sep 22 23:22:17.862 INFO Sent repair work, now wait for resp
39599 Sep 22 23:22:17.862 INFO [0] received reconcile message
39600 Sep 22 23:22:17.862 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }, state: ClientData([InProgress, New, New]) }, : downstairs
39601 Sep 22 23:22:17.862 INFO [0] client ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }
39602 Sep 22 23:22:17.862 INFO [1] received reconcile message
39603 Sep 22 23:22:17.862 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39604 Sep 22 23:22:17.862 INFO [1] client ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }
39605 Sep 22 23:22:17.862 INFO [2] received reconcile message
39606 Sep 22 23:22:17.862 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39607 Sep 22 23:22:17.862 INFO [2] client ExtentReopen { repair_id: ReconciliationId(191), extent_id: 118 }
39608 Sep 22 23:22:17.862 DEBG 191 Reopen extent 118
39609 Sep 22 23:22:17.863 DEBG 191 Reopen extent 118
39610 Sep 22 23:22:17.863 DEBG 191 Reopen extent 118
39611 Sep 22 23:22:17.864 DEBG [2] It's time to notify for 191
39612 Sep 22 23:22:17.864 INFO Completion from [2] id:191 status:true
39613 Sep 22 23:22:17.864 INFO [192/752] Repair commands completed
39614 Sep 22 23:22:17.864 INFO Pop front: ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39615 Sep 22 23:22:17.864 INFO Sent repair work, now wait for resp
39616 Sep 22 23:22:17.864 INFO [0] received reconcile message
39617 Sep 22 23:22:17.864 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39618 Sep 22 23:22:17.864 INFO [0] client ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39619 Sep 22 23:22:17.864 INFO [1] received reconcile message
39620 Sep 22 23:22:17.864 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39621 Sep 22 23:22:17.864 INFO [1] client ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39622 Sep 22 23:22:17.864 INFO [2] received reconcile message
39623 Sep 22 23:22:17.864 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39624 Sep 22 23:22:17.864 INFO [2] client ExtentFlush { repair_id: ReconciliationId(192), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39625 Sep 22 23:22:17.864 DEBG 192 Flush extent 111 with f:2 g:2
39626 Sep 22 23:22:17.864 DEBG Flush just extent 111 with f:2 and g:2
39627 Sep 22 23:22:17.864 DEBG [1] It's time to notify for 192
39628 Sep 22 23:22:17.864 INFO Completion from [1] id:192 status:true
39629 Sep 22 23:22:17.864 INFO [193/752] Repair commands completed
39630 Sep 22 23:22:17.864 INFO Pop front: ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }, state: ClientData([New, New, New]) }
39631 Sep 22 23:22:17.864 INFO Sent repair work, now wait for resp
39632 Sep 22 23:22:17.865 INFO [0] received reconcile message
39633 Sep 22 23:22:17.865 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }, state: ClientData([InProgress, New, New]) }, : downstairs
39634 Sep 22 23:22:17.865 INFO [0] client ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }
39635 Sep 22 23:22:17.865 INFO [1] received reconcile message
39636 Sep 22 23:22:17.865 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39637 Sep 22 23:22:17.865 INFO [1] client ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }
39638 Sep 22 23:22:17.865 INFO [2] received reconcile message
39639 Sep 22 23:22:17.865 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39640 Sep 22 23:22:17.865 INFO [2] client ExtentClose { repair_id: ReconciliationId(193), extent_id: 111 }
39641 Sep 22 23:22:17.865 DEBG 193 Close extent 111
39642 Sep 22 23:22:17.865 DEBG 193 Close extent 111
39643 Sep 22 23:22:17.865 DEBG 193 Close extent 111
39644 Sep 22 23:22:17.866 DEBG [2] It's time to notify for 193
39645 Sep 22 23:22:17.866 INFO Completion from [2] id:193 status:true
39646 Sep 22 23:22:17.866 INFO [194/752] Repair commands completed
39647 Sep 22 23:22:17.866 INFO Pop front: ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39648 Sep 22 23:22:17.866 INFO Sent repair work, now wait for resp
39649 Sep 22 23:22:17.866 INFO [0] received reconcile message
39650 Sep 22 23:22:17.866 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39651 Sep 22 23:22:17.866 INFO [0] client ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39652 Sep 22 23:22:17.866 INFO [0] Sending repair request ReconciliationId(194)
39653 Sep 22 23:22:17.866 INFO [1] received reconcile message
39654 Sep 22 23:22:17.866 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39655 Sep 22 23:22:17.866 INFO [1] client ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39656 Sep 22 23:22:17.866 INFO [1] No action required ReconciliationId(194)
39657 Sep 22 23:22:17.866 INFO [2] received reconcile message
39658 Sep 22 23:22:17.866 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39659 Sep 22 23:22:17.866 INFO [2] client ExtentRepair { repair_id: ReconciliationId(194), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39660 Sep 22 23:22:17.866 INFO [2] No action required ReconciliationId(194)
39661 Sep 22 23:22:17.866 DEBG 194 Repair extent 111 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39662 Sep 22 23:22:17.866 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/06F.copy"
39663 Sep 22 23:22:17.891 DEBG IO Read 1079 has deps [JobId(1078)]
39664 Sep 22 23:22:17.929 INFO accepted connection, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39665 Sep 22 23:22:17.929 TRCE incoming request, uri: /extent/111/files, method: GET, req_id: 61ab86c4-55c1-4d1f-b6fe-a9e9ef6e110e, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39666 Sep 22 23:22:17.930 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/111/files, method: GET, req_id: 61ab86c4-55c1-4d1f-b6fe-a9e9ef6e110e, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39667 Sep 22 23:22:17.930 INFO eid:111 Found repair files: ["06F", "06F.db"]
39668 Sep 22 23:22:17.930 TRCE incoming request, uri: /newextent/111/data, method: GET, req_id: 8eab960c-dbf2-4ad6-b86b-2449d4f4740d, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39669 Sep 22 23:22:17.930 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/111/data, method: GET, req_id: 8eab960c-dbf2-4ad6-b86b-2449d4f4740d, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39670 Sep 22 23:22:17.935 TRCE incoming request, uri: /newextent/111/db, method: GET, req_id: f6dd1e53-8fcc-4d09-9fd0-e53082f98423, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39671 Sep 22 23:22:17.936 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/111/db, method: GET, req_id: f6dd1e53-8fcc-4d09-9fd0-e53082f98423, remote_addr: 127.0.0.1:43732, local_addr: 127.0.0.1:52864, task: repair
39672 Sep 22 23:22:17.937 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/06F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/06F.replace"
39673 Sep 22 23:22:17.937 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39674 Sep 22 23:22:17.938 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/06F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39675 Sep 22 23:22:17.938 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06F"
39676 Sep 22 23:22:17.938 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06F.db"
39677 Sep 22 23:22:17.938 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39678 Sep 22 23:22:17.938 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/06F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/06F.completed"
39679 Sep 22 23:22:17.938 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39680 Sep 22 23:22:17.938 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39681 Sep 22 23:22:17.938 DEBG [0] It's time to notify for 194
39682 Sep 22 23:22:17.938 INFO Completion from [0] id:194 status:true
39683 Sep 22 23:22:17.938 INFO [195/752] Repair commands completed
39684 Sep 22 23:22:17.938 INFO Pop front: ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }, state: ClientData([New, New, New]) }
39685 Sep 22 23:22:17.938 INFO Sent repair work, now wait for resp
39686 Sep 22 23:22:17.938 INFO [0] received reconcile message
39687 Sep 22 23:22:17.938 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }, state: ClientData([InProgress, New, New]) }, : downstairs
39688 Sep 22 23:22:17.938 INFO [0] client ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }
39689 Sep 22 23:22:17.939 INFO [1] received reconcile message
39690 Sep 22 23:22:17.939 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39691 Sep 22 23:22:17.939 INFO [1] client ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }
39692 Sep 22 23:22:17.939 INFO [2] received reconcile message
39693 Sep 22 23:22:17.939 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39694 Sep 22 23:22:17.939 INFO [2] client ExtentReopen { repair_id: ReconciliationId(195), extent_id: 111 }
39695 Sep 22 23:22:17.939 DEBG 195 Reopen extent 111
39696 Sep 22 23:22:17.939 DEBG 195 Reopen extent 111
39697 Sep 22 23:22:17.940 DEBG 195 Reopen extent 111
39698 Sep 22 23:22:17.940 DEBG [2] It's time to notify for 195
39699 Sep 22 23:22:17.940 INFO Completion from [2] id:195 status:true
39700 Sep 22 23:22:17.940 INFO [196/752] Repair commands completed
39701 Sep 22 23:22:17.940 INFO Pop front: ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39702 Sep 22 23:22:17.940 INFO Sent repair work, now wait for resp
39703 Sep 22 23:22:17.941 INFO [0] received reconcile message
39704 Sep 22 23:22:17.941 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39705 Sep 22 23:22:17.941 INFO [0] client ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39706 Sep 22 23:22:17.941 INFO [1] received reconcile message
39707 Sep 22 23:22:17.941 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39708 Sep 22 23:22:17.941 INFO [1] client ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39709 Sep 22 23:22:17.941 INFO [2] received reconcile message
39710 Sep 22 23:22:17.941 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39711 Sep 22 23:22:17.941 INFO [2] client ExtentFlush { repair_id: ReconciliationId(196), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39712 Sep 22 23:22:17.941 DEBG 196 Flush extent 74 with f:2 g:2
39713 Sep 22 23:22:17.941 DEBG Flush just extent 74 with f:2 and g:2
39714 Sep 22 23:22:17.941 DEBG [1] It's time to notify for 196
39715 Sep 22 23:22:17.941 INFO Completion from [1] id:196 status:true
39716 Sep 22 23:22:17.941 INFO [197/752] Repair commands completed
39717 Sep 22 23:22:17.941 INFO Pop front: ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }, state: ClientData([New, New, New]) }
39718 Sep 22 23:22:17.941 INFO Sent repair work, now wait for resp
39719 Sep 22 23:22:17.941 INFO [0] received reconcile message
39720 Sep 22 23:22:17.941 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }, state: ClientData([InProgress, New, New]) }, : downstairs
39721 Sep 22 23:22:17.941 INFO [0] client ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }
39722 Sep 22 23:22:17.941 INFO [1] received reconcile message
39723 Sep 22 23:22:17.941 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39724 Sep 22 23:22:17.941 INFO [1] client ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }
39725 Sep 22 23:22:17.941 INFO [2] received reconcile message
39726 Sep 22 23:22:17.941 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39727 Sep 22 23:22:17.941 INFO [2] client ExtentClose { repair_id: ReconciliationId(197), extent_id: 74 }
39728 Sep 22 23:22:17.941 DEBG 197 Close extent 74
39729 Sep 22 23:22:17.942 DEBG 197 Close extent 74
39730 Sep 22 23:22:17.942 DEBG 197 Close extent 74
39731 Sep 22 23:22:17.942 DEBG [2] It's time to notify for 197
39732 Sep 22 23:22:17.942 INFO Completion from [2] id:197 status:true
39733 Sep 22 23:22:17.942 INFO [198/752] Repair commands completed
39734 Sep 22 23:22:17.942 INFO Pop front: ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39735 Sep 22 23:22:17.942 INFO Sent repair work, now wait for resp
39736 Sep 22 23:22:17.942 INFO [0] received reconcile message
39737 Sep 22 23:22:17.942 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39738 Sep 22 23:22:17.942 INFO [0] client ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39739 Sep 22 23:22:17.942 INFO [0] Sending repair request ReconciliationId(198)
39740 Sep 22 23:22:17.943 INFO [1] received reconcile message
39741 Sep 22 23:22:17.943 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39742 Sep 22 23:22:17.943 INFO [1] client ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39743 Sep 22 23:22:17.943 INFO [1] No action required ReconciliationId(198)
39744 Sep 22 23:22:17.943 INFO [2] received reconcile message
39745 Sep 22 23:22:17.943 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39746 Sep 22 23:22:17.943 INFO [2] client ExtentRepair { repair_id: ReconciliationId(198), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39747 Sep 22 23:22:17.943 INFO [2] No action required ReconciliationId(198)
39748 Sep 22 23:22:17.943 DEBG 198 Repair extent 74 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39749 Sep 22 23:22:17.943 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/04A.copy"
39750 Sep 22 23:22:18.008 INFO accepted connection, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39751 Sep 22 23:22:18.008 TRCE incoming request, uri: /extent/74/files, method: GET, req_id: 64c89e0a-fbba-4d06-89f1-aacd6404aad5, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39752 Sep 22 23:22:18.009 INFO request completed, latency_us: 268, response_code: 200, uri: /extent/74/files, method: GET, req_id: 64c89e0a-fbba-4d06-89f1-aacd6404aad5, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39753 Sep 22 23:22:18.009 INFO eid:74 Found repair files: ["04A", "04A.db"]
39754 Sep 22 23:22:18.009 TRCE incoming request, uri: /newextent/74/data, method: GET, req_id: b7ad28f1-31fa-43e7-abf9-df8bfcbf3709, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39755 Sep 22 23:22:18.010 INFO request completed, latency_us: 361, response_code: 200, uri: /newextent/74/data, method: GET, req_id: b7ad28f1-31fa-43e7-abf9-df8bfcbf3709, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39756 Sep 22 23:22:18.015 TRCE incoming request, uri: /newextent/74/db, method: GET, req_id: a8fe68c6-22ca-45d6-b791-86fb2037dd26, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39757 Sep 22 23:22:18.015 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/74/db, method: GET, req_id: a8fe68c6-22ca-45d6-b791-86fb2037dd26, remote_addr: 127.0.0.1:37752, local_addr: 127.0.0.1:52864, task: repair
39758 Sep 22 23:22:18.016 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/04A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/04A.replace"
39759 Sep 22 23:22:18.016 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39760 Sep 22 23:22:18.017 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/04A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39761 Sep 22 23:22:18.018 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04A"
39762 Sep 22 23:22:18.018 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04A.db"
39763 Sep 22 23:22:18.018 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39764 Sep 22 23:22:18.018 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/04A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/04A.completed"
39765 Sep 22 23:22:18.018 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39766 Sep 22 23:22:18.018 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39767 Sep 22 23:22:18.018 DEBG [0] It's time to notify for 198
39768 Sep 22 23:22:18.018 INFO Completion from [0] id:198 status:true
39769 Sep 22 23:22:18.018 INFO [199/752] Repair commands completed
39770 Sep 22 23:22:18.018 INFO Pop front: ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }, state: ClientData([New, New, New]) }
39771 Sep 22 23:22:18.018 INFO Sent repair work, now wait for resp
39772 Sep 22 23:22:18.018 INFO [0] received reconcile message
39773 Sep 22 23:22:18.018 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }, state: ClientData([InProgress, New, New]) }, : downstairs
39774 Sep 22 23:22:18.018 INFO [0] client ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }
39775 Sep 22 23:22:18.018 INFO [1] received reconcile message
39776 Sep 22 23:22:18.018 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39777 Sep 22 23:22:18.018 INFO [1] client ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }
39778 Sep 22 23:22:18.018 INFO [2] received reconcile message
39779 Sep 22 23:22:18.018 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39780 Sep 22 23:22:18.018 INFO [2] client ExtentReopen { repair_id: ReconciliationId(199), extent_id: 74 }
39781 Sep 22 23:22:18.019 DEBG 199 Reopen extent 74
39782 Sep 22 23:22:18.019 DEBG 199 Reopen extent 74
39783 Sep 22 23:22:18.020 DEBG 199 Reopen extent 74
39784 Sep 22 23:22:18.021 DEBG [2] It's time to notify for 199
39785 Sep 22 23:22:18.021 INFO Completion from [2] id:199 status:true
39786 Sep 22 23:22:18.021 INFO [200/752] Repair commands completed
39787 Sep 22 23:22:18.021 INFO Pop front: ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39788 Sep 22 23:22:18.021 INFO Sent repair work, now wait for resp
39789 Sep 22 23:22:18.021 INFO [0] received reconcile message
39790 Sep 22 23:22:18.021 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39791 Sep 22 23:22:18.021 INFO [0] client ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39792 Sep 22 23:22:18.021 INFO [1] received reconcile message
39793 Sep 22 23:22:18.021 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39794 Sep 22 23:22:18.021 INFO [1] client ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39795 Sep 22 23:22:18.021 INFO [2] received reconcile message
39796 Sep 22 23:22:18.021 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39797 Sep 22 23:22:18.021 INFO [2] client ExtentFlush { repair_id: ReconciliationId(200), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39798 Sep 22 23:22:18.021 DEBG 200 Flush extent 117 with f:2 g:2
39799 Sep 22 23:22:18.021 DEBG Flush just extent 117 with f:2 and g:2
39800 Sep 22 23:22:18.021 DEBG [1] It's time to notify for 200
39801 Sep 22 23:22:18.021 INFO Completion from [1] id:200 status:true
39802 Sep 22 23:22:18.021 INFO [201/752] Repair commands completed
39803 Sep 22 23:22:18.021 INFO Pop front: ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }, state: ClientData([New, New, New]) }
39804 Sep 22 23:22:18.021 INFO Sent repair work, now wait for resp
39805 Sep 22 23:22:18.021 INFO [0] received reconcile message
39806 Sep 22 23:22:18.021 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }, state: ClientData([InProgress, New, New]) }, : downstairs
39807 Sep 22 23:22:18.021 INFO [0] client ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }
39808 Sep 22 23:22:18.021 INFO [1] received reconcile message
39809 Sep 22 23:22:18.021 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39810 Sep 22 23:22:18.021 INFO [1] client ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }
39811 Sep 22 23:22:18.021 INFO [2] received reconcile message
39812 Sep 22 23:22:18.021 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39813 Sep 22 23:22:18.021 INFO [2] client ExtentClose { repair_id: ReconciliationId(201), extent_id: 117 }
39814 Sep 22 23:22:18.022 DEBG 201 Close extent 117
39815 Sep 22 23:22:18.022 DEBG 201 Close extent 117
39816 Sep 22 23:22:18.022 DEBG 201 Close extent 117
39817 Sep 22 23:22:18.022 DEBG [2] It's time to notify for 201
39818 Sep 22 23:22:18.023 INFO Completion from [2] id:201 status:true
39819 Sep 22 23:22:18.023 INFO [202/752] Repair commands completed
39820 Sep 22 23:22:18.023 INFO Pop front: ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39821 Sep 22 23:22:18.023 INFO Sent repair work, now wait for resp
39822 Sep 22 23:22:18.023 INFO [0] received reconcile message
39823 Sep 22 23:22:18.023 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39824 Sep 22 23:22:18.023 INFO [0] client ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39825 Sep 22 23:22:18.023 INFO [0] Sending repair request ReconciliationId(202)
39826 Sep 22 23:22:18.023 INFO [1] received reconcile message
39827 Sep 22 23:22:18.023 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39828 Sep 22 23:22:18.023 INFO [1] client ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39829 Sep 22 23:22:18.023 INFO [1] No action required ReconciliationId(202)
39830 Sep 22 23:22:18.023 INFO [2] received reconcile message
39831 Sep 22 23:22:18.023 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39832 Sep 22 23:22:18.023 INFO [2] client ExtentRepair { repair_id: ReconciliationId(202), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39833 Sep 22 23:22:18.023 INFO [2] No action required ReconciliationId(202)
39834 Sep 22 23:22:18.023 DEBG 202 Repair extent 117 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39835 Sep 22 23:22:18.023 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/075.copy"
39836 Sep 22 23:22:18.088 INFO accepted connection, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39837 Sep 22 23:22:18.088 TRCE incoming request, uri: /extent/117/files, method: GET, req_id: 63f21dd0-f05b-4eee-a36c-8dfababa19a0, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39838 Sep 22 23:22:18.088 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/117/files, method: GET, req_id: 63f21dd0-f05b-4eee-a36c-8dfababa19a0, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39839 Sep 22 23:22:18.089 INFO eid:117 Found repair files: ["075", "075.db"]
39840 Sep 22 23:22:18.089 TRCE incoming request, uri: /newextent/117/data, method: GET, req_id: 3fa22972-afc6-4a73-89df-f022914ea3e3, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39841 Sep 22 23:22:18.089 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/117/data, method: GET, req_id: 3fa22972-afc6-4a73-89df-f022914ea3e3, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39842 Sep 22 23:22:18.094 TRCE incoming request, uri: /newextent/117/db, method: GET, req_id: 82ac3232-8296-4d8b-9763-879aa10a7c10, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39843 Sep 22 23:22:18.094 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/117/db, method: GET, req_id: 82ac3232-8296-4d8b-9763-879aa10a7c10, remote_addr: 127.0.0.1:63962, local_addr: 127.0.0.1:52864, task: repair
39844 Sep 22 23:22:18.096 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/075.copy" to "/tmp/downstairs-zrMnlo6G/00/000/075.replace"
39845 Sep 22 23:22:18.096 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39846 Sep 22 23:22:18.096 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/075.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39847 Sep 22 23:22:18.097 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/075"
39848 Sep 22 23:22:18.097 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/075.db"
39849 Sep 22 23:22:18.097 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39850 Sep 22 23:22:18.097 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/075.replace" to "/tmp/downstairs-zrMnlo6G/00/000/075.completed"
39851 Sep 22 23:22:18.097 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39852 Sep 22 23:22:18.097 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39853 Sep 22 23:22:18.097 DEBG [0] It's time to notify for 202
39854 Sep 22 23:22:18.097 INFO Completion from [0] id:202 status:true
39855 Sep 22 23:22:18.097 INFO [203/752] Repair commands completed
39856 Sep 22 23:22:18.097 INFO Pop front: ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }, state: ClientData([New, New, New]) }
39857 Sep 22 23:22:18.097 INFO Sent repair work, now wait for resp
39858 Sep 22 23:22:18.097 INFO [0] received reconcile message
39859 Sep 22 23:22:18.097 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }, state: ClientData([InProgress, New, New]) }, : downstairs
39860 Sep 22 23:22:18.097 INFO [0] client ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }
39861 Sep 22 23:22:18.097 INFO [1] received reconcile message
39862 Sep 22 23:22:18.097 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39863 Sep 22 23:22:18.097 INFO [1] client ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }
39864 Sep 22 23:22:18.097 INFO [2] received reconcile message
39865 Sep 22 23:22:18.097 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39866 Sep 22 23:22:18.098 INFO [2] client ExtentReopen { repair_id: ReconciliationId(203), extent_id: 117 }
39867 Sep 22 23:22:18.098 DEBG 203 Reopen extent 117
39868 Sep 22 23:22:18.098 DEBG 203 Reopen extent 117
39869 Sep 22 23:22:18.099 DEBG 203 Reopen extent 117
39870 Sep 22 23:22:18.099 DEBG [2] It's time to notify for 203
39871 Sep 22 23:22:18.099 INFO Completion from [2] id:203 status:true
39872 Sep 22 23:22:18.099 INFO [204/752] Repair commands completed
39873 Sep 22 23:22:18.099 INFO Pop front: ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39874 Sep 22 23:22:18.099 INFO Sent repair work, now wait for resp
39875 Sep 22 23:22:18.099 INFO [0] received reconcile message
39876 Sep 22 23:22:18.099 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39877 Sep 22 23:22:18.099 INFO [0] client ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39878 Sep 22 23:22:18.099 INFO [1] received reconcile message
39879 Sep 22 23:22:18.100 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39880 Sep 22 23:22:18.100 INFO [1] client ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39881 Sep 22 23:22:18.100 INFO [2] received reconcile message
39882 Sep 22 23:22:18.100 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39883 Sep 22 23:22:18.100 INFO [2] client ExtentFlush { repair_id: ReconciliationId(204), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39884 Sep 22 23:22:18.100 DEBG 204 Flush extent 75 with f:2 g:2
39885 Sep 22 23:22:18.100 DEBG Flush just extent 75 with f:2 and g:2
39886 Sep 22 23:22:18.100 DEBG [1] It's time to notify for 204
39887 Sep 22 23:22:18.100 INFO Completion from [1] id:204 status:true
39888 Sep 22 23:22:18.100 INFO [205/752] Repair commands completed
39889 Sep 22 23:22:18.100 INFO Pop front: ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }, state: ClientData([New, New, New]) }
39890 Sep 22 23:22:18.100 INFO Sent repair work, now wait for resp
39891 Sep 22 23:22:18.100 INFO [0] received reconcile message
39892 Sep 22 23:22:18.100 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }, state: ClientData([InProgress, New, New]) }, : downstairs
39893 Sep 22 23:22:18.100 INFO [0] client ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }
39894 Sep 22 23:22:18.100 INFO [1] received reconcile message
39895 Sep 22 23:22:18.100 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39896 Sep 22 23:22:18.100 INFO [1] client ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }
39897 Sep 22 23:22:18.100 INFO [2] received reconcile message
39898 Sep 22 23:22:18.100 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39899 Sep 22 23:22:18.100 INFO [2] client ExtentClose { repair_id: ReconciliationId(205), extent_id: 75 }
39900 Sep 22 23:22:18.100 DEBG 205 Close extent 75
39901 Sep 22 23:22:18.101 DEBG 205 Close extent 75
39902 Sep 22 23:22:18.101 DEBG 205 Close extent 75
39903 Sep 22 23:22:18.101 DEBG [2] It's time to notify for 205
39904 Sep 22 23:22:18.101 INFO Completion from [2] id:205 status:true
39905 Sep 22 23:22:18.101 INFO [206/752] Repair commands completed
39906 Sep 22 23:22:18.101 INFO Pop front: ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39907 Sep 22 23:22:18.101 INFO Sent repair work, now wait for resp
39908 Sep 22 23:22:18.101 INFO [0] received reconcile message
39909 Sep 22 23:22:18.101 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39910 Sep 22 23:22:18.101 INFO [0] client ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39911 Sep 22 23:22:18.101 INFO [0] Sending repair request ReconciliationId(206)
39912 Sep 22 23:22:18.101 INFO [1] received reconcile message
39913 Sep 22 23:22:18.101 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39914 Sep 22 23:22:18.102 INFO [1] client ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39915 Sep 22 23:22:18.102 INFO [1] No action required ReconciliationId(206)
39916 Sep 22 23:22:18.102 INFO [2] received reconcile message
39917 Sep 22 23:22:18.102 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39918 Sep 22 23:22:18.102 INFO [2] client ExtentRepair { repair_id: ReconciliationId(206), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39919 Sep 22 23:22:18.102 INFO [2] No action required ReconciliationId(206)
39920 Sep 22 23:22:18.102 DEBG 206 Repair extent 75 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
39921 Sep 22 23:22:18.102 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/04B.copy"
39922 Sep 22 23:22:18.166 INFO accepted connection, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39923 Sep 22 23:22:18.166 TRCE incoming request, uri: /extent/75/files, method: GET, req_id: aead01ee-3d45-4dad-96aa-dad051404bf8, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39924 Sep 22 23:22:18.167 INFO request completed, latency_us: 217, response_code: 200, uri: /extent/75/files, method: GET, req_id: aead01ee-3d45-4dad-96aa-dad051404bf8, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39925 Sep 22 23:22:18.167 INFO eid:75 Found repair files: ["04B", "04B.db"]
39926 Sep 22 23:22:18.167 TRCE incoming request, uri: /newextent/75/data, method: GET, req_id: 487e4630-5e41-48c9-bdad-584ebb7255c6, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39927 Sep 22 23:22:18.168 INFO request completed, latency_us: 312, response_code: 200, uri: /newextent/75/data, method: GET, req_id: 487e4630-5e41-48c9-bdad-584ebb7255c6, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39928 Sep 22 23:22:18.173 TRCE incoming request, uri: /newextent/75/db, method: GET, req_id: 83bb672c-9d8d-4646-9468-52e74e7da250, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39929 Sep 22 23:22:18.173 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/75/db, method: GET, req_id: 83bb672c-9d8d-4646-9468-52e74e7da250, remote_addr: 127.0.0.1:43026, local_addr: 127.0.0.1:52864, task: repair
39930 Sep 22 23:22:18.174 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/04B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/04B.replace"
39931 Sep 22 23:22:18.174 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39932 Sep 22 23:22:18.175 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/04B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
39933 Sep 22 23:22:18.175 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04B"
39934 Sep 22 23:22:18.175 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04B.db"
39935 Sep 22 23:22:18.175 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39936 Sep 22 23:22:18.175 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/04B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/04B.completed"
39937 Sep 22 23:22:18.175 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39938 Sep 22 23:22:18.175 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
39939 Sep 22 23:22:18.176 DEBG [0] It's time to notify for 206
39940 Sep 22 23:22:18.176 INFO Completion from [0] id:206 status:true
39941 Sep 22 23:22:18.176 INFO [207/752] Repair commands completed
39942 Sep 22 23:22:18.176 INFO Pop front: ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }, state: ClientData([New, New, New]) }
39943 Sep 22 23:22:18.176 INFO Sent repair work, now wait for resp
39944 Sep 22 23:22:18.176 INFO [0] received reconcile message
39945 Sep 22 23:22:18.176 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }, state: ClientData([InProgress, New, New]) }, : downstairs
39946 Sep 22 23:22:18.176 INFO [0] client ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }
39947 Sep 22 23:22:18.176 INFO [1] received reconcile message
39948 Sep 22 23:22:18.176 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39949 Sep 22 23:22:18.176 INFO [1] client ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }
39950 Sep 22 23:22:18.176 INFO [2] received reconcile message
39951 Sep 22 23:22:18.176 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39952 Sep 22 23:22:18.176 INFO [2] client ExtentReopen { repair_id: ReconciliationId(207), extent_id: 75 }
39953 Sep 22 23:22:18.176 DEBG 207 Reopen extent 75
39954 Sep 22 23:22:18.177 DEBG 207 Reopen extent 75
39955 Sep 22 23:22:18.177 DEBG 207 Reopen extent 75
39956 Sep 22 23:22:18.178 DEBG [2] It's time to notify for 207
39957 Sep 22 23:22:18.178 INFO Completion from [2] id:207 status:true
39958 Sep 22 23:22:18.178 INFO [208/752] Repair commands completed
39959 Sep 22 23:22:18.178 INFO Pop front: ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39960 Sep 22 23:22:18.178 INFO Sent repair work, now wait for resp
39961 Sep 22 23:22:18.178 INFO [0] received reconcile message
39962 Sep 22 23:22:18.178 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39963 Sep 22 23:22:18.178 INFO [0] client ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39964 Sep 22 23:22:18.178 INFO [1] received reconcile message
39965 Sep 22 23:22:18.178 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39966 Sep 22 23:22:18.178 INFO [1] client ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39967 Sep 22 23:22:18.178 INFO [2] received reconcile message
39968 Sep 22 23:22:18.178 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39969 Sep 22 23:22:18.178 INFO [2] client ExtentFlush { repair_id: ReconciliationId(208), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39970 Sep 22 23:22:18.178 DEBG 208 Flush extent 36 with f:2 g:2
39971 Sep 22 23:22:18.178 DEBG Flush just extent 36 with f:2 and g:2
39972 Sep 22 23:22:18.178 DEBG [1] It's time to notify for 208
39973 Sep 22 23:22:18.178 INFO Completion from [1] id:208 status:true
39974 Sep 22 23:22:18.178 INFO [209/752] Repair commands completed
39975 Sep 22 23:22:18.178 INFO Pop front: ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }, state: ClientData([New, New, New]) }
39976 Sep 22 23:22:18.178 INFO Sent repair work, now wait for resp
39977 Sep 22 23:22:18.179 INFO [0] received reconcile message
39978 Sep 22 23:22:18.179 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }, state: ClientData([InProgress, New, New]) }, : downstairs
39979 Sep 22 23:22:18.179 INFO [0] client ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }
39980 Sep 22 23:22:18.179 INFO [1] received reconcile message
39981 Sep 22 23:22:18.179 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39982 Sep 22 23:22:18.179 INFO [1] client ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }
39983 Sep 22 23:22:18.179 INFO [2] received reconcile message
39984 Sep 22 23:22:18.179 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39985 Sep 22 23:22:18.179 INFO [2] client ExtentClose { repair_id: ReconciliationId(209), extent_id: 36 }
39986 Sep 22 23:22:18.179 DEBG 209 Close extent 36
39987 Sep 22 23:22:18.179 DEBG 209 Close extent 36
39988 Sep 22 23:22:18.179 DEBG 209 Close extent 36
39989 Sep 22 23:22:18.180 DEBG [2] It's time to notify for 209
39990 Sep 22 23:22:18.180 INFO Completion from [2] id:209 status:true
39991 Sep 22 23:22:18.180 INFO [210/752] Repair commands completed
39992 Sep 22 23:22:18.180 INFO Pop front: ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39993 Sep 22 23:22:18.180 INFO Sent repair work, now wait for resp
39994 Sep 22 23:22:18.180 INFO [0] received reconcile message
39995 Sep 22 23:22:18.180 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39996 Sep 22 23:22:18.180 INFO [0] client ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
39997 Sep 22 23:22:18.180 INFO [0] Sending repair request ReconciliationId(210)
39998 Sep 22 23:22:18.180 INFO [1] received reconcile message
39999 Sep 22 23:22:18.180 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40000 Sep 22 23:22:18.180 INFO [1] client ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40001 Sep 22 23:22:18.180 INFO [1] No action required ReconciliationId(210)
40002 Sep 22 23:22:18.180 INFO [2] received reconcile message
40003 Sep 22 23:22:18.180 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40004 Sep 22 23:22:18.180 INFO [2] client ExtentRepair { repair_id: ReconciliationId(210), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40005 Sep 22 23:22:18.180 INFO [2] No action required ReconciliationId(210)
40006 Sep 22 23:22:18.180 DEBG 210 Repair extent 36 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40007 Sep 22 23:22:18.180 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/024.copy"
40008 Sep 22 23:22:18.232 DEBG up_ds_listen was notified
40009 Sep 22 23:22:18.232 DEBG up_ds_listen process 1078
40010 Sep 22 23:22:18.232 DEBG [A] ack job 1078:79, : downstairs
40011 Sep 22 23:22:18.232 DEBG up_ds_listen checked 1 jobs, back to waiting
40012 Sep 22 23:22:18.241 WARN returning error on flush!
40013 Sep 22 23:22:18.241 DEBG Flush :1078 extent_limit None deps:[JobId(1077), JobId(1076)] res:false f:28 g:1
40014 Sep 22 23:22:18.241 DEBG Flush :1078 extent_limit None deps:[JobId(1077), JobId(1076)] res:true f:28 g:1
40015 Sep 22 23:22:18.245 INFO accepted connection, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40016 Sep 22 23:22:18.245 TRCE incoming request, uri: /extent/36/files, method: GET, req_id: e88d4089-ac79-4993-96ac-f22fd70a018c, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40017 Sep 22 23:22:18.245 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/36/files, method: GET, req_id: e88d4089-ac79-4993-96ac-f22fd70a018c, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40018 Sep 22 23:22:18.246 INFO eid:36 Found repair files: ["024", "024.db"]
40019 Sep 22 23:22:18.246 TRCE incoming request, uri: /newextent/36/data, method: GET, req_id: 73106bb5-5ccd-4152-a90f-d1e25b896f8d, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40020 Sep 22 23:22:18.246 INFO request completed, latency_us: 246, response_code: 200, uri: /newextent/36/data, method: GET, req_id: 73106bb5-5ccd-4152-a90f-d1e25b896f8d, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40021 Sep 22 23:22:18.247 DEBG Read :1079 deps:[JobId(1078)] res:true
40022 Sep 22 23:22:18.251 TRCE incoming request, uri: /newextent/36/db, method: GET, req_id: 271d5338-c5fa-4a08-866d-9902d69d3324, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40023 Sep 22 23:22:18.252 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/36/db, method: GET, req_id: 271d5338-c5fa-4a08-866d-9902d69d3324, remote_addr: 127.0.0.1:35803, local_addr: 127.0.0.1:52864, task: repair
40024 Sep 22 23:22:18.253 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/024.copy" to "/tmp/downstairs-zrMnlo6G/00/000/024.replace"
40025 Sep 22 23:22:18.253 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40026 Sep 22 23:22:18.253 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/024.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40027 Sep 22 23:22:18.254 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/024"
40028 Sep 22 23:22:18.254 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/024.db"
40029 Sep 22 23:22:18.254 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40030 Sep 22 23:22:18.254 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/024.replace" to "/tmp/downstairs-zrMnlo6G/00/000/024.completed"
40031 Sep 22 23:22:18.254 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40032 Sep 22 23:22:18.254 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40033 Sep 22 23:22:18.254 DEBG [0] It's time to notify for 210
40034 Sep 22 23:22:18.254 INFO Completion from [0] id:210 status:true
40035 Sep 22 23:22:18.254 INFO [211/752] Repair commands completed
40036 Sep 22 23:22:18.254 INFO Pop front: ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }, state: ClientData([New, New, New]) }
40037 Sep 22 23:22:18.254 INFO Sent repair work, now wait for resp
40038 Sep 22 23:22:18.254 INFO [0] received reconcile message
40039 Sep 22 23:22:18.254 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }, state: ClientData([InProgress, New, New]) }, : downstairs
40040 Sep 22 23:22:18.254 INFO [0] client ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }
40041 Sep 22 23:22:18.254 INFO [1] received reconcile message
40042 Sep 22 23:22:18.254 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40043 Sep 22 23:22:18.254 INFO [1] client ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }
40044 Sep 22 23:22:18.255 INFO [2] received reconcile message
40045 Sep 22 23:22:18.255 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40046 Sep 22 23:22:18.255 INFO [2] client ExtentReopen { repair_id: ReconciliationId(211), extent_id: 36 }
40047 Sep 22 23:22:18.255 DEBG 211 Reopen extent 36
40048 Sep 22 23:22:18.255 DEBG 211 Reopen extent 36
40049 Sep 22 23:22:18.256 DEBG 211 Reopen extent 36
40050 Sep 22 23:22:18.256 DEBG [2] It's time to notify for 211
40051 Sep 22 23:22:18.256 INFO Completion from [2] id:211 status:true
40052 Sep 22 23:22:18.256 INFO [212/752] Repair commands completed
40053 Sep 22 23:22:18.256 INFO Pop front: ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40054 Sep 22 23:22:18.256 INFO Sent repair work, now wait for resp
40055 Sep 22 23:22:18.256 INFO [0] received reconcile message
40056 Sep 22 23:22:18.256 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40057 Sep 22 23:22:18.256 INFO [0] client ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40058 Sep 22 23:22:18.256 INFO [1] received reconcile message
40059 Sep 22 23:22:18.257 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40060 Sep 22 23:22:18.257 INFO [1] client ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40061 Sep 22 23:22:18.257 INFO [2] received reconcile message
40062 Sep 22 23:22:18.257 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40063 Sep 22 23:22:18.257 INFO [2] client ExtentFlush { repair_id: ReconciliationId(212), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40064 Sep 22 23:22:18.257 DEBG 212 Flush extent 68 with f:2 g:2
40065 Sep 22 23:22:18.257 DEBG Flush just extent 68 with f:2 and g:2
40066 Sep 22 23:22:18.257 DEBG [1] It's time to notify for 212
40067 Sep 22 23:22:18.257 INFO Completion from [1] id:212 status:true
40068 Sep 22 23:22:18.257 INFO [213/752] Repair commands completed
40069 Sep 22 23:22:18.257 INFO Pop front: ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }, state: ClientData([New, New, New]) }
40070 Sep 22 23:22:18.257 INFO Sent repair work, now wait for resp
40071 Sep 22 23:22:18.257 INFO [0] received reconcile message
40072 Sep 22 23:22:18.257 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }, state: ClientData([InProgress, New, New]) }, : downstairs
40073 Sep 22 23:22:18.257 INFO [0] client ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }
40074 Sep 22 23:22:18.257 INFO [1] received reconcile message
40075 Sep 22 23:22:18.257 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40076 Sep 22 23:22:18.257 INFO [1] client ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }
40077 Sep 22 23:22:18.257 INFO [2] received reconcile message
40078 Sep 22 23:22:18.257 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40079 Sep 22 23:22:18.257 INFO [2] client ExtentClose { repair_id: ReconciliationId(213), extent_id: 68 }
40080 Sep 22 23:22:18.257 DEBG 213 Close extent 68
40081 Sep 22 23:22:18.258 DEBG 213 Close extent 68
40082 Sep 22 23:22:18.258 DEBG 213 Close extent 68
40083 Sep 22 23:22:18.258 DEBG [2] It's time to notify for 213
40084 Sep 22 23:22:18.258 INFO Completion from [2] id:213 status:true
40085 Sep 22 23:22:18.258 INFO [214/752] Repair commands completed
40086 Sep 22 23:22:18.258 INFO Pop front: ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40087 Sep 22 23:22:18.258 INFO Sent repair work, now wait for resp
40088 Sep 22 23:22:18.258 INFO [0] received reconcile message
40089 Sep 22 23:22:18.258 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40090 Sep 22 23:22:18.258 INFO [0] client ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40091 Sep 22 23:22:18.258 INFO [0] Sending repair request ReconciliationId(214)
40092 Sep 22 23:22:18.258 INFO [1] received reconcile message
40093 Sep 22 23:22:18.258 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40094 Sep 22 23:22:18.259 INFO [1] client ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40095 Sep 22 23:22:18.259 INFO [1] No action required ReconciliationId(214)
40096 Sep 22 23:22:18.259 INFO [2] received reconcile message
40097 Sep 22 23:22:18.259 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40098 Sep 22 23:22:18.259 INFO [2] client ExtentRepair { repair_id: ReconciliationId(214), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40099 Sep 22 23:22:18.259 INFO [2] No action required ReconciliationId(214)
40100 Sep 22 23:22:18.259 DEBG 214 Repair extent 68 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40101 Sep 22 23:22:18.259 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/044.copy"
40102 Sep 22 23:22:18.320 INFO accepted connection, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40103 Sep 22 23:22:18.320 TRCE incoming request, uri: /extent/68/files, method: GET, req_id: 0f06646f-470e-4234-97b4-bd55d0b4888e, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40104 Sep 22 23:22:18.320 INFO request completed, latency_us: 192, response_code: 200, uri: /extent/68/files, method: GET, req_id: 0f06646f-470e-4234-97b4-bd55d0b4888e, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40105 Sep 22 23:22:18.321 INFO eid:68 Found repair files: ["044", "044.db"]
40106 Sep 22 23:22:18.321 TRCE incoming request, uri: /newextent/68/data, method: GET, req_id: 2ee0b688-b753-4cdb-9e08-1474ea08ff1e, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40107 Sep 22 23:22:18.321 INFO request completed, latency_us: 248, response_code: 200, uri: /newextent/68/data, method: GET, req_id: 2ee0b688-b753-4cdb-9e08-1474ea08ff1e, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40108 Sep 22 23:22:18.326 TRCE incoming request, uri: /newextent/68/db, method: GET, req_id: 086cd728-4b80-4448-b637-5d00fbc108df, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40109 Sep 22 23:22:18.326 INFO request completed, latency_us: 289, response_code: 200, uri: /newextent/68/db, method: GET, req_id: 086cd728-4b80-4448-b637-5d00fbc108df, remote_addr: 127.0.0.1:46505, local_addr: 127.0.0.1:52864, task: repair
40110 Sep 22 23:22:18.327 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/044.copy" to "/tmp/downstairs-zrMnlo6G/00/000/044.replace"
40111 Sep 22 23:22:18.327 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40112 Sep 22 23:22:18.328 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/044.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40113 Sep 22 23:22:18.328 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/044"
40114 Sep 22 23:22:18.329 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/044.db"
40115 Sep 22 23:22:18.329 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40116 Sep 22 23:22:18.329 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/044.replace" to "/tmp/downstairs-zrMnlo6G/00/000/044.completed"
40117 Sep 22 23:22:18.329 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40118 Sep 22 23:22:18.329 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40119 Sep 22 23:22:18.329 DEBG [0] It's time to notify for 214
40120 Sep 22 23:22:18.329 INFO Completion from [0] id:214 status:true
40121 Sep 22 23:22:18.329 INFO [215/752] Repair commands completed
40122 Sep 22 23:22:18.329 INFO Pop front: ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }, state: ClientData([New, New, New]) }
40123 Sep 22 23:22:18.329 INFO Sent repair work, now wait for resp
40124 Sep 22 23:22:18.329 INFO [0] received reconcile message
40125 Sep 22 23:22:18.329 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }, state: ClientData([InProgress, New, New]) }, : downstairs
40126 Sep 22 23:22:18.329 INFO [0] client ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }
40127 Sep 22 23:22:18.329 INFO [1] received reconcile message
40128 Sep 22 23:22:18.329 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40129 Sep 22 23:22:18.329 INFO [1] client ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }
40130 Sep 22 23:22:18.329 INFO [2] received reconcile message
40131 Sep 22 23:22:18.329 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40132 Sep 22 23:22:18.329 INFO [2] client ExtentReopen { repair_id: ReconciliationId(215), extent_id: 68 }
40133 Sep 22 23:22:18.329 DEBG 215 Reopen extent 68
40134 Sep 22 23:22:18.330 DEBG 215 Reopen extent 68
40135 Sep 22 23:22:18.330 DEBG 215 Reopen extent 68
40136 Sep 22 23:22:18.331 DEBG [2] It's time to notify for 215
40137 Sep 22 23:22:18.331 INFO Completion from [2] id:215 status:true
40138 Sep 22 23:22:18.331 INFO [216/752] Repair commands completed
40139 Sep 22 23:22:18.331 INFO Pop front: ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40140 Sep 22 23:22:18.331 INFO Sent repair work, now wait for resp
40141 Sep 22 23:22:18.331 INFO [0] received reconcile message
40142 Sep 22 23:22:18.331 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40143 Sep 22 23:22:18.331 INFO [0] client ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40144 Sep 22 23:22:18.331 INFO [1] received reconcile message
40145 Sep 22 23:22:18.331 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40146 Sep 22 23:22:18.331 INFO [1] client ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40147 Sep 22 23:22:18.331 INFO [2] received reconcile message
40148 Sep 22 23:22:18.331 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40149 Sep 22 23:22:18.331 INFO [2] client ExtentFlush { repair_id: ReconciliationId(216), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40150 Sep 22 23:22:18.331 DEBG 216 Flush extent 123 with f:2 g:2
40151 Sep 22 23:22:18.331 DEBG Flush just extent 123 with f:2 and g:2
40152 Sep 22 23:22:18.332 DEBG [1] It's time to notify for 216
40153 Sep 22 23:22:18.332 INFO Completion from [1] id:216 status:true
40154 Sep 22 23:22:18.332 INFO [217/752] Repair commands completed
40155 Sep 22 23:22:18.332 INFO Pop front: ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }, state: ClientData([New, New, New]) }
40156 Sep 22 23:22:18.332 INFO Sent repair work, now wait for resp
40157 Sep 22 23:22:18.332 INFO [0] received reconcile message
40158 Sep 22 23:22:18.332 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }, state: ClientData([InProgress, New, New]) }, : downstairs
40159 Sep 22 23:22:18.332 INFO [0] client ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }
40160 Sep 22 23:22:18.332 INFO [1] received reconcile message
40161 Sep 22 23:22:18.332 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40162 Sep 22 23:22:18.332 INFO [1] client ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }
40163 Sep 22 23:22:18.332 INFO [2] received reconcile message
40164 Sep 22 23:22:18.332 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40165 Sep 22 23:22:18.332 INFO [2] client ExtentClose { repair_id: ReconciliationId(217), extent_id: 123 }
40166 Sep 22 23:22:18.332 DEBG 217 Close extent 123
40167 Sep 22 23:22:18.332 DEBG 217 Close extent 123
40168 Sep 22 23:22:18.333 DEBG 217 Close extent 123
40169 Sep 22 23:22:18.333 DEBG [2] It's time to notify for 217
40170 Sep 22 23:22:18.333 INFO Completion from [2] id:217 status:true
40171 Sep 22 23:22:18.333 INFO [218/752] Repair commands completed
40172 Sep 22 23:22:18.333 INFO Pop front: ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40173 Sep 22 23:22:18.333 INFO Sent repair work, now wait for resp
40174 Sep 22 23:22:18.333 INFO [0] received reconcile message
40175 Sep 22 23:22:18.333 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40176 Sep 22 23:22:18.333 INFO [0] client ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40177 Sep 22 23:22:18.333 INFO [0] Sending repair request ReconciliationId(218)
40178 Sep 22 23:22:18.333 INFO [1] received reconcile message
40179 Sep 22 23:22:18.333 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40180 Sep 22 23:22:18.333 INFO [1] client ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40181 Sep 22 23:22:18.333 INFO [1] No action required ReconciliationId(218)
40182 Sep 22 23:22:18.333 INFO [2] received reconcile message
40183 Sep 22 23:22:18.333 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40184 Sep 22 23:22:18.333 INFO [2] client ExtentRepair { repair_id: ReconciliationId(218), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40185 Sep 22 23:22:18.333 INFO [2] No action required ReconciliationId(218)
40186 Sep 22 23:22:18.333 DEBG 218 Repair extent 123 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40187 Sep 22 23:22:18.333 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/07B.copy"
40188 Sep 22 23:22:18.398 INFO accepted connection, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40189 Sep 22 23:22:18.398 TRCE incoming request, uri: /extent/123/files, method: GET, req_id: c8fee53d-ebe6-4f07-89f5-e0c768ed804f, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40190 Sep 22 23:22:18.399 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/123/files, method: GET, req_id: c8fee53d-ebe6-4f07-89f5-e0c768ed804f, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40191 Sep 22 23:22:18.399 INFO eid:123 Found repair files: ["07B", "07B.db"]
40192 Sep 22 23:22:18.399 TRCE incoming request, uri: /newextent/123/data, method: GET, req_id: 95cf2971-edd6-4637-9a8f-01c6322fecab, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40193 Sep 22 23:22:18.400 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/123/data, method: GET, req_id: 95cf2971-edd6-4637-9a8f-01c6322fecab, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40194 Sep 22 23:22:18.405 TRCE incoming request, uri: /newextent/123/db, method: GET, req_id: fded857c-e2a8-421f-a8ba-32b10339259c, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40195 Sep 22 23:22:18.405 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/123/db, method: GET, req_id: fded857c-e2a8-421f-a8ba-32b10339259c, remote_addr: 127.0.0.1:61337, local_addr: 127.0.0.1:52864, task: repair
40196 Sep 22 23:22:18.406 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/07B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/07B.replace"
40197 Sep 22 23:22:18.406 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40198 Sep 22 23:22:18.407 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/07B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40199 Sep 22 23:22:18.407 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07B"
40200 Sep 22 23:22:18.407 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07B.db"
40201 Sep 22 23:22:18.407 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40202 Sep 22 23:22:18.407 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/07B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/07B.completed"
40203 Sep 22 23:22:18.407 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40204 Sep 22 23:22:18.407 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40205 Sep 22 23:22:18.407 DEBG [0] It's time to notify for 218
40206 Sep 22 23:22:18.408 INFO Completion from [0] id:218 status:true
40207 Sep 22 23:22:18.408 INFO [219/752] Repair commands completed
40208 Sep 22 23:22:18.408 INFO Pop front: ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }, state: ClientData([New, New, New]) }
40209 Sep 22 23:22:18.408 INFO Sent repair work, now wait for resp
40210 Sep 22 23:22:18.408 INFO [0] received reconcile message
40211 Sep 22 23:22:18.408 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }, state: ClientData([InProgress, New, New]) }, : downstairs
40212 Sep 22 23:22:18.408 INFO [0] client ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }
40213 Sep 22 23:22:18.408 INFO [1] received reconcile message
40214 Sep 22 23:22:18.408 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40215 Sep 22 23:22:18.408 INFO [1] client ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }
40216 Sep 22 23:22:18.408 INFO [2] received reconcile message
40217 Sep 22 23:22:18.408 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40218 Sep 22 23:22:18.408 INFO [2] client ExtentReopen { repair_id: ReconciliationId(219), extent_id: 123 }
40219 Sep 22 23:22:18.408 DEBG 219 Reopen extent 123
40220 Sep 22 23:22:18.409 DEBG 219 Reopen extent 123
40221 Sep 22 23:22:18.409 DEBG 219 Reopen extent 123
40222 Sep 22 23:22:18.410 DEBG [2] It's time to notify for 219
40223 Sep 22 23:22:18.410 INFO Completion from [2] id:219 status:true
40224 Sep 22 23:22:18.410 INFO [220/752] Repair commands completed
40225 Sep 22 23:22:18.410 INFO Pop front: ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40226 Sep 22 23:22:18.410 INFO Sent repair work, now wait for resp
40227 Sep 22 23:22:18.410 INFO [0] received reconcile message
40228 Sep 22 23:22:18.410 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40229 Sep 22 23:22:18.410 INFO [0] client ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40230 Sep 22 23:22:18.410 INFO [1] received reconcile message
40231 Sep 22 23:22:18.410 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40232 Sep 22 23:22:18.410 INFO [1] client ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40233 Sep 22 23:22:18.410 INFO [2] received reconcile message
40234 Sep 22 23:22:18.410 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40235 Sep 22 23:22:18.410 INFO [2] client ExtentFlush { repair_id: ReconciliationId(220), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40236 Sep 22 23:22:18.410 DEBG 220 Flush extent 55 with f:2 g:2
40237 Sep 22 23:22:18.410 DEBG Flush just extent 55 with f:2 and g:2
40238 Sep 22 23:22:18.410 DEBG [1] It's time to notify for 220
40239 Sep 22 23:22:18.410 INFO Completion from [1] id:220 status:true
40240 Sep 22 23:22:18.410 INFO [221/752] Repair commands completed
40241 Sep 22 23:22:18.410 INFO Pop front: ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }, state: ClientData([New, New, New]) }
40242 Sep 22 23:22:18.410 INFO Sent repair work, now wait for resp
40243 Sep 22 23:22:18.410 INFO [0] received reconcile message
40244 Sep 22 23:22:18.410 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }, state: ClientData([InProgress, New, New]) }, : downstairs
40245 Sep 22 23:22:18.410 INFO [0] client ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }
40246 Sep 22 23:22:18.410 INFO [1] received reconcile message
40247 Sep 22 23:22:18.410 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40248 Sep 22 23:22:18.410 INFO [1] client ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }
40249 Sep 22 23:22:18.411 INFO [2] received reconcile message
40250 Sep 22 23:22:18.411 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40251 Sep 22 23:22:18.411 INFO [2] client ExtentClose { repair_id: ReconciliationId(221), extent_id: 55 }
40252 Sep 22 23:22:18.411 DEBG 221 Close extent 55
40253 Sep 22 23:22:18.411 DEBG 221 Close extent 55
40254 Sep 22 23:22:18.411 DEBG 221 Close extent 55
40255 Sep 22 23:22:18.412 DEBG [2] It's time to notify for 221
40256 Sep 22 23:22:18.412 INFO Completion from [2] id:221 status:true
40257 Sep 22 23:22:18.412 INFO [222/752] Repair commands completed
40258 Sep 22 23:22:18.412 INFO Pop front: ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40259 Sep 22 23:22:18.412 INFO Sent repair work, now wait for resp
40260 Sep 22 23:22:18.412 INFO [0] received reconcile message
40261 Sep 22 23:22:18.412 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40262 Sep 22 23:22:18.412 INFO [0] client ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40263 Sep 22 23:22:18.412 INFO [0] Sending repair request ReconciliationId(222)
40264 Sep 22 23:22:18.412 INFO [1] received reconcile message
40265 Sep 22 23:22:18.412 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40266 Sep 22 23:22:18.412 INFO [1] client ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40267 Sep 22 23:22:18.412 INFO [1] No action required ReconciliationId(222)
40268 Sep 22 23:22:18.412 INFO [2] received reconcile message
40269 Sep 22 23:22:18.412 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40270 Sep 22 23:22:18.412 INFO [2] client ExtentRepair { repair_id: ReconciliationId(222), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40271 Sep 22 23:22:18.412 INFO [2] No action required ReconciliationId(222)
40272 Sep 22 23:22:18.412 DEBG 222 Repair extent 55 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40273 Sep 22 23:22:18.412 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/037.copy"
40274 Sep 22 23:22:18.475 INFO accepted connection, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40275 Sep 22 23:22:18.476 TRCE incoming request, uri: /extent/55/files, method: GET, req_id: b9421453-5d39-43d3-ba0b-08e7d720653d, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40276 Sep 22 23:22:18.476 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/55/files, method: GET, req_id: b9421453-5d39-43d3-ba0b-08e7d720653d, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40277 Sep 22 23:22:18.476 INFO eid:55 Found repair files: ["037", "037.db"]
40278 Sep 22 23:22:18.476 TRCE incoming request, uri: /newextent/55/data, method: GET, req_id: d0513051-e217-4cf7-bdb9-05812f4bf8e8, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40279 Sep 22 23:22:18.477 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/55/data, method: GET, req_id: d0513051-e217-4cf7-bdb9-05812f4bf8e8, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40280 Sep 22 23:22:18.482 TRCE incoming request, uri: /newextent/55/db, method: GET, req_id: b0f27a1b-a0ae-4437-b37c-3ad59ccb1ac1, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40281 Sep 22 23:22:18.482 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/55/db, method: GET, req_id: b0f27a1b-a0ae-4437-b37c-3ad59ccb1ac1, remote_addr: 127.0.0.1:52619, local_addr: 127.0.0.1:52864, task: repair
40282 Sep 22 23:22:18.483 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/037.copy" to "/tmp/downstairs-zrMnlo6G/00/000/037.replace"
40283 Sep 22 23:22:18.483 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40284 Sep 22 23:22:18.484 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/037.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40285 Sep 22 23:22:18.484 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/037"
40286 Sep 22 23:22:18.484 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/037.db"
40287 Sep 22 23:22:18.484 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40288 Sep 22 23:22:18.484 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/037.replace" to "/tmp/downstairs-zrMnlo6G/00/000/037.completed"
40289 Sep 22 23:22:18.484 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40290 Sep 22 23:22:18.484 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40291 Sep 22 23:22:18.485 DEBG [0] It's time to notify for 222
40292 Sep 22 23:22:18.485 INFO Completion from [0] id:222 status:true
40293 Sep 22 23:22:18.485 INFO [223/752] Repair commands completed
40294 Sep 22 23:22:18.485 INFO Pop front: ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }, state: ClientData([New, New, New]) }
40295 Sep 22 23:22:18.485 INFO Sent repair work, now wait for resp
40296 Sep 22 23:22:18.485 INFO [0] received reconcile message
40297 Sep 22 23:22:18.485 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }, state: ClientData([InProgress, New, New]) }, : downstairs
40298 Sep 22 23:22:18.485 INFO [0] client ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }
40299 Sep 22 23:22:18.485 INFO [1] received reconcile message
40300 Sep 22 23:22:18.485 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40301 Sep 22 23:22:18.485 INFO [1] client ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }
40302 Sep 22 23:22:18.485 INFO [2] received reconcile message
40303 Sep 22 23:22:18.485 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40304 Sep 22 23:22:18.485 INFO [2] client ExtentReopen { repair_id: ReconciliationId(223), extent_id: 55 }
40305 Sep 22 23:22:18.485 DEBG 223 Reopen extent 55
40306 Sep 22 23:22:18.486 DEBG 223 Reopen extent 55
40307 Sep 22 23:22:18.486 DEBG 223 Reopen extent 55
40308 Sep 22 23:22:18.487 DEBG [2] It's time to notify for 223
40309 Sep 22 23:22:18.487 INFO Completion from [2] id:223 status:true
40310 Sep 22 23:22:18.487 INFO [224/752] Repair commands completed
40311 Sep 22 23:22:18.487 INFO Pop front: ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40312 Sep 22 23:22:18.487 INFO Sent repair work, now wait for resp
40313 Sep 22 23:22:18.487 INFO [0] received reconcile message
40314 Sep 22 23:22:18.487 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40315 Sep 22 23:22:18.487 INFO [0] client ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40316 Sep 22 23:22:18.487 INFO [1] received reconcile message
40317 Sep 22 23:22:18.487 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40318 Sep 22 23:22:18.487 INFO [1] client ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40319 Sep 22 23:22:18.487 INFO [2] received reconcile message
40320 Sep 22 23:22:18.487 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40321 Sep 22 23:22:18.487 INFO [2] client ExtentFlush { repair_id: ReconciliationId(224), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40322 Sep 22 23:22:18.487 DEBG 224 Flush extent 159 with f:2 g:2
40323 Sep 22 23:22:18.487 DEBG Flush just extent 159 with f:2 and g:2
40324 Sep 22 23:22:18.487 DEBG [1] It's time to notify for 224
40325 Sep 22 23:22:18.487 INFO Completion from [1] id:224 status:true
40326 Sep 22 23:22:18.487 INFO [225/752] Repair commands completed
40327 Sep 22 23:22:18.487 INFO Pop front: ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }, state: ClientData([New, New, New]) }
40328 Sep 22 23:22:18.487 INFO Sent repair work, now wait for resp
40329 Sep 22 23:22:18.487 INFO [0] received reconcile message
40330 Sep 22 23:22:18.487 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }, state: ClientData([InProgress, New, New]) }, : downstairs
40331 Sep 22 23:22:18.487 INFO [0] client ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }
40332 Sep 22 23:22:18.487 INFO [1] received reconcile message
40333 Sep 22 23:22:18.487 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40334 Sep 22 23:22:18.487 INFO [1] client ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }
40335 Sep 22 23:22:18.488 INFO [2] received reconcile message
40336 Sep 22 23:22:18.488 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40337 Sep 22 23:22:18.488 INFO [2] client ExtentClose { repair_id: ReconciliationId(225), extent_id: 159 }
40338 Sep 22 23:22:18.488 DEBG 225 Close extent 159
40339 Sep 22 23:22:18.488 DEBG 225 Close extent 159
40340 Sep 22 23:22:18.488 DEBG 225 Close extent 159
40341 Sep 22 23:22:18.489 DEBG [2] It's time to notify for 225
40342 Sep 22 23:22:18.489 INFO Completion from [2] id:225 status:true
40343 Sep 22 23:22:18.489 INFO [226/752] Repair commands completed
40344 Sep 22 23:22:18.489 INFO Pop front: ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40345 Sep 22 23:22:18.489 INFO Sent repair work, now wait for resp
40346 Sep 22 23:22:18.489 INFO [0] received reconcile message
40347 Sep 22 23:22:18.489 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40348 Sep 22 23:22:18.489 INFO [0] client ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40349 Sep 22 23:22:18.489 INFO [0] Sending repair request ReconciliationId(226)
40350 Sep 22 23:22:18.489 INFO [1] received reconcile message
40351 Sep 22 23:22:18.489 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40352 Sep 22 23:22:18.489 INFO [1] client ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40353 Sep 22 23:22:18.489 INFO [1] No action required ReconciliationId(226)
40354 Sep 22 23:22:18.489 INFO [2] received reconcile message
40355 Sep 22 23:22:18.489 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40356 Sep 22 23:22:18.489 INFO [2] client ExtentRepair { repair_id: ReconciliationId(226), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40357 Sep 22 23:22:18.489 INFO [2] No action required ReconciliationId(226)
40358 Sep 22 23:22:18.489 DEBG 226 Repair extent 159 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40359 Sep 22 23:22:18.489 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/09F.copy"
40360 Sep 22 23:22:18.552 INFO accepted connection, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40361 Sep 22 23:22:18.552 TRCE incoming request, uri: /extent/159/files, method: GET, req_id: a2c4a738-936c-489a-a497-bd2593b836bb, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40362 Sep 22 23:22:18.552 INFO request completed, latency_us: 188, response_code: 200, uri: /extent/159/files, method: GET, req_id: a2c4a738-936c-489a-a497-bd2593b836bb, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40363 Sep 22 23:22:18.553 INFO eid:159 Found repair files: ["09F", "09F.db"]
40364 Sep 22 23:22:18.553 TRCE incoming request, uri: /newextent/159/data, method: GET, req_id: d3012b5b-4822-4e04-b273-924d66b8e4c5, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40365 Sep 22 23:22:18.553 INFO request completed, latency_us: 248, response_code: 200, uri: /newextent/159/data, method: GET, req_id: d3012b5b-4822-4e04-b273-924d66b8e4c5, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40366 Sep 22 23:22:18.558 TRCE incoming request, uri: /newextent/159/db, method: GET, req_id: 978c50d1-4d92-4168-a163-f0f57ce68012, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40367 Sep 22 23:22:18.559 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/159/db, method: GET, req_id: 978c50d1-4d92-4168-a163-f0f57ce68012, remote_addr: 127.0.0.1:64824, local_addr: 127.0.0.1:52864, task: repair
40368 Sep 22 23:22:18.560 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/09F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/09F.replace"
40369 Sep 22 23:22:18.560 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40370 Sep 22 23:22:18.560 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/09F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40371 Sep 22 23:22:18.561 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09F"
40372 Sep 22 23:22:18.561 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09F.db"
40373 Sep 22 23:22:18.561 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40374 Sep 22 23:22:18.561 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/09F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/09F.completed"
40375 Sep 22 23:22:18.561 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40376 Sep 22 23:22:18.561 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40377 Sep 22 23:22:18.561 DEBG [0] It's time to notify for 226
40378 Sep 22 23:22:18.561 INFO Completion from [0] id:226 status:true
40379 Sep 22 23:22:18.561 INFO [227/752] Repair commands completed
40380 Sep 22 23:22:18.561 INFO Pop front: ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }, state: ClientData([New, New, New]) }
40381 Sep 22 23:22:18.561 INFO Sent repair work, now wait for resp
40382 Sep 22 23:22:18.561 INFO [0] received reconcile message
40383 Sep 22 23:22:18.561 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }, state: ClientData([InProgress, New, New]) }, : downstairs
40384 Sep 22 23:22:18.561 INFO [0] client ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }
40385 Sep 22 23:22:18.561 INFO [1] received reconcile message
40386 Sep 22 23:22:18.561 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40387 Sep 22 23:22:18.561 INFO [1] client ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }
40388 Sep 22 23:22:18.561 INFO [2] received reconcile message
40389 Sep 22 23:22:18.561 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40390 Sep 22 23:22:18.562 INFO [2] client ExtentReopen { repair_id: ReconciliationId(227), extent_id: 159 }
40391 Sep 22 23:22:18.562 DEBG 227 Reopen extent 159
40392 Sep 22 23:22:18.562 DEBG 227 Reopen extent 159
40393 Sep 22 23:22:18.563 DEBG 227 Reopen extent 159
40394 Sep 22 23:22:18.563 DEBG [2] It's time to notify for 227
40395 Sep 22 23:22:18.563 INFO Completion from [2] id:227 status:true
40396 Sep 22 23:22:18.563 INFO [228/752] Repair commands completed
40397 Sep 22 23:22:18.563 INFO Pop front: ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40398 Sep 22 23:22:18.563 INFO Sent repair work, now wait for resp
40399 Sep 22 23:22:18.563 INFO [0] received reconcile message
40400 Sep 22 23:22:18.563 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40401 Sep 22 23:22:18.563 INFO [0] client ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40402 Sep 22 23:22:18.563 INFO [1] received reconcile message
40403 Sep 22 23:22:18.563 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40404 Sep 22 23:22:18.563 INFO [1] client ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40405 Sep 22 23:22:18.563 INFO [2] received reconcile message
40406 Sep 22 23:22:18.564 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40407 Sep 22 23:22:18.564 INFO [2] client ExtentFlush { repair_id: ReconciliationId(228), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40408 Sep 22 23:22:18.564 DEBG 228 Flush extent 35 with f:2 g:2
40409 Sep 22 23:22:18.564 DEBG Flush just extent 35 with f:2 and g:2
40410 Sep 22 23:22:18.564 DEBG [1] It's time to notify for 228
40411 Sep 22 23:22:18.564 INFO Completion from [1] id:228 status:true
40412 Sep 22 23:22:18.564 INFO [229/752] Repair commands completed
40413 Sep 22 23:22:18.564 INFO Pop front: ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }, state: ClientData([New, New, New]) }
40414 Sep 22 23:22:18.564 INFO Sent repair work, now wait for resp
40415 Sep 22 23:22:18.564 INFO [0] received reconcile message
40416 Sep 22 23:22:18.564 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }, state: ClientData([InProgress, New, New]) }, : downstairs
40417 Sep 22 23:22:18.564 INFO [0] client ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }
40418 Sep 22 23:22:18.564 INFO [1] received reconcile message
40419 Sep 22 23:22:18.564 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40420 Sep 22 23:22:18.564 INFO [1] client ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }
40421 Sep 22 23:22:18.564 INFO [2] received reconcile message
40422 Sep 22 23:22:18.564 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40423 Sep 22 23:22:18.564 INFO [2] client ExtentClose { repair_id: ReconciliationId(229), extent_id: 35 }
40424 Sep 22 23:22:18.564 DEBG 229 Close extent 35
40425 Sep 22 23:22:18.564 DEBG 229 Close extent 35
40426 Sep 22 23:22:18.565 DEBG 229 Close extent 35
40427 Sep 22 23:22:18.565 DEBG [2] It's time to notify for 229
40428 Sep 22 23:22:18.565 INFO Completion from [2] id:229 status:true
40429 Sep 22 23:22:18.565 INFO [230/752] Repair commands completed
40430 Sep 22 23:22:18.565 INFO Pop front: ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40431 Sep 22 23:22:18.565 INFO Sent repair work, now wait for resp
40432 Sep 22 23:22:18.565 INFO [0] received reconcile message
40433 Sep 22 23:22:18.565 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40434 Sep 22 23:22:18.565 INFO [0] client ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40435 Sep 22 23:22:18.565 INFO [0] Sending repair request ReconciliationId(230)
40436 Sep 22 23:22:18.565 INFO [1] received reconcile message
40437 Sep 22 23:22:18.565 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40438 Sep 22 23:22:18.565 INFO [1] client ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40439 Sep 22 23:22:18.565 INFO [1] No action required ReconciliationId(230)
40440 Sep 22 23:22:18.565 INFO [2] received reconcile message
40441 Sep 22 23:22:18.565 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40442 Sep 22 23:22:18.566 INFO [2] client ExtentRepair { repair_id: ReconciliationId(230), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40443 Sep 22 23:22:18.566 INFO [2] No action required ReconciliationId(230)
40444 Sep 22 23:22:18.566 DEBG 230 Repair extent 35 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40445 Sep 22 23:22:18.566 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/023.copy"
40446 Sep 22 23:22:18.629 INFO accepted connection, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40447 Sep 22 23:22:18.629 TRCE incoming request, uri: /extent/35/files, method: GET, req_id: 225c9d90-712b-43ef-b653-ea878429a7be, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40448 Sep 22 23:22:18.629 INFO request completed, latency_us: 230, response_code: 200, uri: /extent/35/files, method: GET, req_id: 225c9d90-712b-43ef-b653-ea878429a7be, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40449 Sep 22 23:22:18.629 INFO eid:35 Found repair files: ["023", "023.db"]
40450 Sep 22 23:22:18.630 TRCE incoming request, uri: /newextent/35/data, method: GET, req_id: 3a9cd04a-dae9-4562-9e96-dce1548ec2fb, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40451 Sep 22 23:22:18.630 INFO request completed, latency_us: 335, response_code: 200, uri: /newextent/35/data, method: GET, req_id: 3a9cd04a-dae9-4562-9e96-dce1548ec2fb, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40452 Sep 22 23:22:18.635 TRCE incoming request, uri: /newextent/35/db, method: GET, req_id: 4291d16b-a54b-4265-9470-27718c042181, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40453 Sep 22 23:22:18.636 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/35/db, method: GET, req_id: 4291d16b-a54b-4265-9470-27718c042181, remote_addr: 127.0.0.1:40899, local_addr: 127.0.0.1:52864, task: repair
40454 Sep 22 23:22:18.637 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/023.copy" to "/tmp/downstairs-zrMnlo6G/00/000/023.replace"
40455 Sep 22 23:22:18.637 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40456 Sep 22 23:22:18.638 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/023.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40457 Sep 22 23:22:18.638 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/023"
40458 Sep 22 23:22:18.639 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/023.db"
40459 Sep 22 23:22:18.639 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40460 Sep 22 23:22:18.639 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/023.replace" to "/tmp/downstairs-zrMnlo6G/00/000/023.completed"
40461 Sep 22 23:22:18.639 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40462 Sep 22 23:22:18.639 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40463 Sep 22 23:22:18.639 DEBG [0] It's time to notify for 230
40464 Sep 22 23:22:18.639 INFO Completion from [0] id:230 status:true
40465 Sep 22 23:22:18.639 INFO [231/752] Repair commands completed
40466 Sep 22 23:22:18.639 INFO Pop front: ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }, state: ClientData([New, New, New]) }
40467 Sep 22 23:22:18.639 INFO Sent repair work, now wait for resp
40468 Sep 22 23:22:18.639 INFO [0] received reconcile message
40469 Sep 22 23:22:18.639 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }, state: ClientData([InProgress, New, New]) }, : downstairs
40470 Sep 22 23:22:18.639 INFO [0] client ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }
40471 Sep 22 23:22:18.639 INFO [1] received reconcile message
40472 Sep 22 23:22:18.639 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40473 Sep 22 23:22:18.639 INFO [1] client ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }
40474 Sep 22 23:22:18.639 INFO [2] received reconcile message
40475 Sep 22 23:22:18.639 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40476 Sep 22 23:22:18.639 INFO [2] client ExtentReopen { repair_id: ReconciliationId(231), extent_id: 35 }
40477 Sep 22 23:22:18.640 DEBG 231 Reopen extent 35
40478 Sep 22 23:22:18.640 DEBG 231 Reopen extent 35
40479 Sep 22 23:22:18.641 DEBG 231 Reopen extent 35
40480 Sep 22 23:22:18.642 DEBG [2] It's time to notify for 231
40481 Sep 22 23:22:18.642 INFO Completion from [2] id:231 status:true
40482 Sep 22 23:22:18.642 INFO [232/752] Repair commands completed
40483 Sep 22 23:22:18.642 INFO Pop front: ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40484 Sep 22 23:22:18.642 INFO Sent repair work, now wait for resp
40485 Sep 22 23:22:18.642 INFO [0] received reconcile message
40486 Sep 22 23:22:18.642 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40487 Sep 22 23:22:18.642 INFO [0] client ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40488 Sep 22 23:22:18.642 INFO [1] received reconcile message
40489 Sep 22 23:22:18.642 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40490 Sep 22 23:22:18.642 INFO [1] client ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40491 Sep 22 23:22:18.642 INFO [2] received reconcile message
40492 Sep 22 23:22:18.642 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40493 Sep 22 23:22:18.642 INFO [2] client ExtentFlush { repair_id: ReconciliationId(232), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40494 Sep 22 23:22:18.642 DEBG 232 Flush extent 162 with f:2 g:2
40495 Sep 22 23:22:18.642 DEBG Flush just extent 162 with f:2 and g:2
40496 Sep 22 23:22:18.642 DEBG [1] It's time to notify for 232
40497 Sep 22 23:22:18.642 INFO Completion from [1] id:232 status:true
40498 Sep 22 23:22:18.642 INFO [233/752] Repair commands completed
40499 Sep 22 23:22:18.642 INFO Pop front: ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }, state: ClientData([New, New, New]) }
40500 Sep 22 23:22:18.642 INFO Sent repair work, now wait for resp
40501 Sep 22 23:22:18.642 INFO [0] received reconcile message
40502 Sep 22 23:22:18.642 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }, state: ClientData([InProgress, New, New]) }, : downstairs
40503 Sep 22 23:22:18.642 INFO [0] client ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }
40504 Sep 22 23:22:18.642 INFO [1] received reconcile message
40505 Sep 22 23:22:18.642 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40506 Sep 22 23:22:18.642 INFO [1] client ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }
40507 Sep 22 23:22:18.642 INFO [2] received reconcile message
40508 Sep 22 23:22:18.642 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40509 Sep 22 23:22:18.642 INFO [2] client ExtentClose { repair_id: ReconciliationId(233), extent_id: 162 }
40510 Sep 22 23:22:18.643 DEBG 233 Close extent 162
40511 Sep 22 23:22:18.643 DEBG 233 Close extent 162
40512 Sep 22 23:22:18.643 DEBG 233 Close extent 162
40513 Sep 22 23:22:18.644 DEBG [2] It's time to notify for 233
40514 Sep 22 23:22:18.644 INFO Completion from [2] id:233 status:true
40515 Sep 22 23:22:18.644 INFO [234/752] Repair commands completed
40516 Sep 22 23:22:18.644 INFO Pop front: ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40517 Sep 22 23:22:18.644 INFO Sent repair work, now wait for resp
40518 Sep 22 23:22:18.644 INFO [0] received reconcile message
40519 Sep 22 23:22:18.644 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40520 Sep 22 23:22:18.644 INFO [0] client ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40521 Sep 22 23:22:18.644 INFO [0] Sending repair request ReconciliationId(234)
40522 Sep 22 23:22:18.644 INFO [1] received reconcile message
40523 Sep 22 23:22:18.644 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40524 Sep 22 23:22:18.644 INFO [1] client ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40525 Sep 22 23:22:18.644 INFO [1] No action required ReconciliationId(234)
40526 Sep 22 23:22:18.644 INFO [2] received reconcile message
40527 Sep 22 23:22:18.644 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40528 Sep 22 23:22:18.644 INFO [2] client ExtentRepair { repair_id: ReconciliationId(234), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40529 Sep 22 23:22:18.644 INFO [2] No action required ReconciliationId(234)
40530 Sep 22 23:22:18.644 DEBG 234 Repair extent 162 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40531 Sep 22 23:22:18.644 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A2.copy"
40532 Sep 22 23:22:18.648 ERRO [0] job id 1078 saw error GenericError("test error")
40533 Sep 22 23:22:18.648 DEBG [rc] retire 1078 clears [JobId(1077), JobId(1078)], : downstairs
40534 Sep 22 23:22:18.648 DEBG IO Flush 1080 has deps [JobId(1079)]
40535 Sep 22 23:22:18.654 DEBG Read :1079 deps:[JobId(1078)] res:true
40536 Sep 22 23:22:18.678 INFO [lossy] sleeping 1 second
40537 Sep 22 23:22:18.709 INFO accepted connection, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40538 Sep 22 23:22:18.710 TRCE incoming request, uri: /extent/162/files, method: GET, req_id: e9c28114-5c07-4f57-9b84-76579bb9f49c, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40539 Sep 22 23:22:18.710 INFO request completed, latency_us: 275, response_code: 200, uri: /extent/162/files, method: GET, req_id: e9c28114-5c07-4f57-9b84-76579bb9f49c, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40540 Sep 22 23:22:18.710 INFO eid:162 Found repair files: ["0A2", "0A2.db"]
40541 Sep 22 23:22:18.711 TRCE incoming request, uri: /newextent/162/data, method: GET, req_id: 16da6fda-565a-46d7-a395-65524eafb833, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40542 Sep 22 23:22:18.711 INFO request completed, latency_us: 331, response_code: 200, uri: /newextent/162/data, method: GET, req_id: 16da6fda-565a-46d7-a395-65524eafb833, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40543 Sep 22 23:22:18.716 TRCE incoming request, uri: /newextent/162/db, method: GET, req_id: 8799dd52-b6f0-4e20-8695-d873cff8edca, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40544 Sep 22 23:22:18.717 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/162/db, method: GET, req_id: 8799dd52-b6f0-4e20-8695-d873cff8edca, remote_addr: 127.0.0.1:44201, local_addr: 127.0.0.1:52864, task: repair
40545 Sep 22 23:22:18.718 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A2.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A2.replace"
40546 Sep 22 23:22:18.718 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40547 Sep 22 23:22:18.719 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A2.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40548 Sep 22 23:22:18.719 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A2"
40549 Sep 22 23:22:18.719 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A2.db"
40550 Sep 22 23:22:18.719 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40551 Sep 22 23:22:18.719 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A2.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A2.completed"
40552 Sep 22 23:22:18.719 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40553 Sep 22 23:22:18.720 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40554 Sep 22 23:22:18.720 DEBG [0] It's time to notify for 234
40555 Sep 22 23:22:18.720 INFO Completion from [0] id:234 status:true
40556 Sep 22 23:22:18.720 INFO [235/752] Repair commands completed
40557 Sep 22 23:22:18.720 INFO Pop front: ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }, state: ClientData([New, New, New]) }
40558 Sep 22 23:22:18.720 INFO Sent repair work, now wait for resp
40559 Sep 22 23:22:18.720 INFO [0] received reconcile message
40560 Sep 22 23:22:18.720 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }, state: ClientData([InProgress, New, New]) }, : downstairs
40561 Sep 22 23:22:18.720 INFO [0] client ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }
40562 Sep 22 23:22:18.720 INFO [1] received reconcile message
40563 Sep 22 23:22:18.720 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40564 Sep 22 23:22:18.720 INFO [1] client ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }
40565 Sep 22 23:22:18.720 INFO [2] received reconcile message
40566 Sep 22 23:22:18.720 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40567 Sep 22 23:22:18.720 INFO [2] client ExtentReopen { repair_id: ReconciliationId(235), extent_id: 162 }
40568 Sep 22 23:22:18.721 DEBG 235 Reopen extent 162
40569 Sep 22 23:22:18.722 DEBG 235 Reopen extent 162
40570 Sep 22 23:22:18.722 DEBG 235 Reopen extent 162
40571 Sep 22 23:22:18.723 DEBG [2] It's time to notify for 235
40572 Sep 22 23:22:18.723 INFO Completion from [2] id:235 status:true
40573 Sep 22 23:22:18.723 INFO [236/752] Repair commands completed
40574 Sep 22 23:22:18.723 INFO Pop front: ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40575 Sep 22 23:22:18.723 INFO Sent repair work, now wait for resp
40576 Sep 22 23:22:18.723 INFO [0] received reconcile message
40577 Sep 22 23:22:18.723 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40578 Sep 22 23:22:18.723 INFO [0] client ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40579 Sep 22 23:22:18.724 INFO [1] received reconcile message
40580 Sep 22 23:22:18.724 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40581 Sep 22 23:22:18.724 INFO [1] client ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40582 Sep 22 23:22:18.724 INFO [2] received reconcile message
40583 Sep 22 23:22:18.724 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40584 Sep 22 23:22:18.724 INFO [2] client ExtentFlush { repair_id: ReconciliationId(236), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40585 Sep 22 23:22:18.724 DEBG 236 Flush extent 121 with f:2 g:2
40586 Sep 22 23:22:18.724 DEBG Flush just extent 121 with f:2 and g:2
40587 Sep 22 23:22:18.724 DEBG [1] It's time to notify for 236
40588 Sep 22 23:22:18.724 INFO Completion from [1] id:236 status:true
40589 Sep 22 23:22:18.724 INFO [237/752] Repair commands completed
40590 Sep 22 23:22:18.724 INFO Pop front: ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }, state: ClientData([New, New, New]) }
40591 Sep 22 23:22:18.724 INFO Sent repair work, now wait for resp
40592 Sep 22 23:22:18.724 INFO [0] received reconcile message
40593 Sep 22 23:22:18.724 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }, state: ClientData([InProgress, New, New]) }, : downstairs
40594 Sep 22 23:22:18.724 INFO [0] client ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }
40595 Sep 22 23:22:18.724 INFO [1] received reconcile message
40596 Sep 22 23:22:18.724 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40597 Sep 22 23:22:18.724 INFO [1] client ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }
40598 Sep 22 23:22:18.724 INFO [2] received reconcile message
40599 Sep 22 23:22:18.724 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40600 Sep 22 23:22:18.724 INFO [2] client ExtentClose { repair_id: ReconciliationId(237), extent_id: 121 }
40601 Sep 22 23:22:18.725 DEBG 237 Close extent 121
40602 Sep 22 23:22:18.725 DEBG 237 Close extent 121
40603 Sep 22 23:22:18.725 DEBG 237 Close extent 121
40604 Sep 22 23:22:18.726 DEBG [2] It's time to notify for 237
40605 Sep 22 23:22:18.726 INFO Completion from [2] id:237 status:true
40606 Sep 22 23:22:18.726 INFO [238/752] Repair commands completed
40607 Sep 22 23:22:18.726 INFO Pop front: ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40608 Sep 22 23:22:18.726 INFO Sent repair work, now wait for resp
40609 Sep 22 23:22:18.726 INFO [0] received reconcile message
40610 Sep 22 23:22:18.726 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40611 Sep 22 23:22:18.726 INFO [0] client ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40612 Sep 22 23:22:18.726 INFO [0] Sending repair request ReconciliationId(238)
40613 Sep 22 23:22:18.726 INFO [1] received reconcile message
40614 Sep 22 23:22:18.726 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40615 Sep 22 23:22:18.726 INFO [1] client ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40616 Sep 22 23:22:18.726 INFO [1] No action required ReconciliationId(238)
40617 Sep 22 23:22:18.726 INFO [2] received reconcile message
40618 Sep 22 23:22:18.726 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40619 Sep 22 23:22:18.726 INFO [2] client ExtentRepair { repair_id: ReconciliationId(238), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40620 Sep 22 23:22:18.727 INFO [2] No action required ReconciliationId(238)
40621 Sep 22 23:22:18.727 DEBG 238 Repair extent 121 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40622 Sep 22 23:22:18.727 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/079.copy"
40623 Sep 22 23:22:18.794 INFO accepted connection, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40624 Sep 22 23:22:18.794 TRCE incoming request, uri: /extent/121/files, method: GET, req_id: 3970fe43-b992-4681-bcaf-6f28a33f63e8, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40625 Sep 22 23:22:18.794 INFO request completed, latency_us: 251, response_code: 200, uri: /extent/121/files, method: GET, req_id: 3970fe43-b992-4681-bcaf-6f28a33f63e8, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40626 Sep 22 23:22:18.795 INFO eid:121 Found repair files: ["079", "079.db"]
40627 Sep 22 23:22:18.795 TRCE incoming request, uri: /newextent/121/data, method: GET, req_id: d390b511-5bff-4098-a403-ab8651c4b24e, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40628 Sep 22 23:22:18.795 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/121/data, method: GET, req_id: d390b511-5bff-4098-a403-ab8651c4b24e, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40629 Sep 22 23:22:18.800 TRCE incoming request, uri: /newextent/121/db, method: GET, req_id: 0fc18e80-1916-4f36-84d2-f7f5d8a9b47b, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40630 Sep 22 23:22:18.801 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/121/db, method: GET, req_id: 0fc18e80-1916-4f36-84d2-f7f5d8a9b47b, remote_addr: 127.0.0.1:39641, local_addr: 127.0.0.1:52864, task: repair
40631 Sep 22 23:22:18.802 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/079.copy" to "/tmp/downstairs-zrMnlo6G/00/000/079.replace"
40632 Sep 22 23:22:18.802 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40633 Sep 22 23:22:18.803 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/079.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40634 Sep 22 23:22:18.803 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/079"
40635 Sep 22 23:22:18.803 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/079.db"
40636 Sep 22 23:22:18.803 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40637 Sep 22 23:22:18.803 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/079.replace" to "/tmp/downstairs-zrMnlo6G/00/000/079.completed"
40638 Sep 22 23:22:18.803 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40639 Sep 22 23:22:18.803 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40640 Sep 22 23:22:18.804 DEBG [0] It's time to notify for 238
40641 Sep 22 23:22:18.804 INFO Completion from [0] id:238 status:true
40642 Sep 22 23:22:18.804 INFO [239/752] Repair commands completed
40643 Sep 22 23:22:18.804 INFO Pop front: ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }, state: ClientData([New, New, New]) }
40644 Sep 22 23:22:18.804 INFO Sent repair work, now wait for resp
40645 Sep 22 23:22:18.804 INFO [0] received reconcile message
40646 Sep 22 23:22:18.804 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }, state: ClientData([InProgress, New, New]) }, : downstairs
40647 Sep 22 23:22:18.804 INFO [0] client ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }
40648 Sep 22 23:22:18.804 INFO [1] received reconcile message
40649 Sep 22 23:22:18.804 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40650 Sep 22 23:22:18.804 INFO [1] client ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }
40651 Sep 22 23:22:18.804 INFO [2] received reconcile message
40652 Sep 22 23:22:18.804 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40653 Sep 22 23:22:18.804 INFO [2] client ExtentReopen { repair_id: ReconciliationId(239), extent_id: 121 }
40654 Sep 22 23:22:18.804 DEBG 239 Reopen extent 121
40655 Sep 22 23:22:18.805 DEBG 239 Reopen extent 121
40656 Sep 22 23:22:18.806 DEBG 239 Reopen extent 121
40657 Sep 22 23:22:18.806 DEBG [2] It's time to notify for 239
40658 Sep 22 23:22:18.806 INFO Completion from [2] id:239 status:true
40659 Sep 22 23:22:18.806 INFO [240/752] Repair commands completed
40660 Sep 22 23:22:18.806 INFO Pop front: ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40661 Sep 22 23:22:18.806 INFO Sent repair work, now wait for resp
40662 Sep 22 23:22:18.806 INFO [0] received reconcile message
40663 Sep 22 23:22:18.806 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40664 Sep 22 23:22:18.806 INFO [0] client ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40665 Sep 22 23:22:18.806 INFO [1] received reconcile message
40666 Sep 22 23:22:18.806 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40667 Sep 22 23:22:18.806 INFO [1] client ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40668 Sep 22 23:22:18.806 INFO [2] received reconcile message
40669 Sep 22 23:22:18.806 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40670 Sep 22 23:22:18.806 INFO [2] client ExtentFlush { repair_id: ReconciliationId(240), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40671 Sep 22 23:22:18.807 DEBG 240 Flush extent 65 with f:2 g:2
40672 Sep 22 23:22:18.807 DEBG Flush just extent 65 with f:2 and g:2
40673 Sep 22 23:22:18.807 DEBG [1] It's time to notify for 240
40674 Sep 22 23:22:18.807 INFO Completion from [1] id:240 status:true
40675 Sep 22 23:22:18.807 INFO [241/752] Repair commands completed
40676 Sep 22 23:22:18.807 INFO Pop front: ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }, state: ClientData([New, New, New]) }
40677 Sep 22 23:22:18.807 INFO Sent repair work, now wait for resp
40678 Sep 22 23:22:18.807 INFO [0] received reconcile message
40679 Sep 22 23:22:18.807 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }, state: ClientData([InProgress, New, New]) }, : downstairs
40680 Sep 22 23:22:18.807 INFO [0] client ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }
40681 Sep 22 23:22:18.807 INFO [1] received reconcile message
40682 Sep 22 23:22:18.807 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40683 Sep 22 23:22:18.807 INFO [1] client ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }
40684 Sep 22 23:22:18.807 INFO [2] received reconcile message
40685 Sep 22 23:22:18.807 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40686 Sep 22 23:22:18.807 INFO [2] client ExtentClose { repair_id: ReconciliationId(241), extent_id: 65 }
40687 Sep 22 23:22:18.807 DEBG 241 Close extent 65
40688 Sep 22 23:22:18.807 DEBG 241 Close extent 65
40689 Sep 22 23:22:18.808 DEBG 241 Close extent 65
40690 Sep 22 23:22:18.808 DEBG [2] It's time to notify for 241
40691 Sep 22 23:22:18.808 INFO Completion from [2] id:241 status:true
40692 Sep 22 23:22:18.808 INFO [242/752] Repair commands completed
40693 Sep 22 23:22:18.808 INFO Pop front: ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40694 Sep 22 23:22:18.808 INFO Sent repair work, now wait for resp
40695 Sep 22 23:22:18.808 INFO [0] received reconcile message
40696 Sep 22 23:22:18.808 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40697 Sep 22 23:22:18.808 INFO [0] client ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40698 Sep 22 23:22:18.808 INFO [0] Sending repair request ReconciliationId(242)
40699 Sep 22 23:22:18.808 INFO [1] received reconcile message
40700 Sep 22 23:22:18.808 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40701 Sep 22 23:22:18.808 INFO [1] client ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40702 Sep 22 23:22:18.808 INFO [1] No action required ReconciliationId(242)
40703 Sep 22 23:22:18.808 INFO [2] received reconcile message
40704 Sep 22 23:22:18.809 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40705 Sep 22 23:22:18.809 INFO [2] client ExtentRepair { repair_id: ReconciliationId(242), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40706 Sep 22 23:22:18.809 INFO [2] No action required ReconciliationId(242)
40707 Sep 22 23:22:18.809 DEBG 242 Repair extent 65 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40708 Sep 22 23:22:18.809 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/041.copy"
40709 Sep 22 23:22:18.872 INFO accepted connection, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40710 Sep 22 23:22:18.873 TRCE incoming request, uri: /extent/65/files, method: GET, req_id: 8c7b47ce-948d-432f-9fc9-956864960b00, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40711 Sep 22 23:22:18.873 INFO request completed, latency_us: 272, response_code: 200, uri: /extent/65/files, method: GET, req_id: 8c7b47ce-948d-432f-9fc9-956864960b00, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40712 Sep 22 23:22:18.873 INFO eid:65 Found repair files: ["041", "041.db"]
40713 Sep 22 23:22:18.874 TRCE incoming request, uri: /newextent/65/data, method: GET, req_id: 634dc24a-b102-4c66-a83f-7aea60b9d054, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40714 Sep 22 23:22:18.874 INFO request completed, latency_us: 363, response_code: 200, uri: /newextent/65/data, method: GET, req_id: 634dc24a-b102-4c66-a83f-7aea60b9d054, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40715 Sep 22 23:22:18.879 TRCE incoming request, uri: /newextent/65/db, method: GET, req_id: 7bd11843-9a6f-48bf-9f63-d26eff5d7842, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40716 Sep 22 23:22:18.879 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/65/db, method: GET, req_id: 7bd11843-9a6f-48bf-9f63-d26eff5d7842, remote_addr: 127.0.0.1:44802, local_addr: 127.0.0.1:52864, task: repair
40717 Sep 22 23:22:18.880 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/041.copy" to "/tmp/downstairs-zrMnlo6G/00/000/041.replace"
40718 Sep 22 23:22:18.880 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40719 Sep 22 23:22:18.882 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/041.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40720 Sep 22 23:22:18.882 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/041"
40721 Sep 22 23:22:18.882 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/041.db"
40722 Sep 22 23:22:18.882 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40723 Sep 22 23:22:18.882 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/041.replace" to "/tmp/downstairs-zrMnlo6G/00/000/041.completed"
40724 Sep 22 23:22:18.882 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40725 Sep 22 23:22:18.882 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40726 Sep 22 23:22:18.882 DEBG [0] It's time to notify for 242
40727 Sep 22 23:22:18.883 INFO Completion from [0] id:242 status:true
40728 Sep 22 23:22:18.883 INFO [243/752] Repair commands completed
40729 Sep 22 23:22:18.883 INFO Pop front: ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }, state: ClientData([New, New, New]) }
40730 Sep 22 23:22:18.883 INFO Sent repair work, now wait for resp
40731 Sep 22 23:22:18.883 INFO [0] received reconcile message
40732 Sep 22 23:22:18.883 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }, state: ClientData([InProgress, New, New]) }, : downstairs
40733 Sep 22 23:22:18.883 INFO [0] client ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }
40734 Sep 22 23:22:18.883 INFO [1] received reconcile message
40735 Sep 22 23:22:18.883 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40736 Sep 22 23:22:18.883 INFO [1] client ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }
40737 Sep 22 23:22:18.883 INFO [2] received reconcile message
40738 Sep 22 23:22:18.883 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40739 Sep 22 23:22:18.883 INFO [2] client ExtentReopen { repair_id: ReconciliationId(243), extent_id: 65 }
40740 Sep 22 23:22:18.883 DEBG 243 Reopen extent 65
40741 Sep 22 23:22:18.884 DEBG 243 Reopen extent 65
40742 Sep 22 23:22:18.884 DEBG 243 Reopen extent 65
40743 Sep 22 23:22:18.885 DEBG [2] It's time to notify for 243
40744 Sep 22 23:22:18.885 INFO Completion from [2] id:243 status:true
40745 Sep 22 23:22:18.885 INFO [244/752] Repair commands completed
40746 Sep 22 23:22:18.885 INFO Pop front: ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40747 Sep 22 23:22:18.885 INFO Sent repair work, now wait for resp
40748 Sep 22 23:22:18.885 INFO [0] received reconcile message
40749 Sep 22 23:22:18.885 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40750 Sep 22 23:22:18.885 INFO [0] client ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40751 Sep 22 23:22:18.885 INFO [1] received reconcile message
40752 Sep 22 23:22:18.885 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40753 Sep 22 23:22:18.885 INFO [1] client ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40754 Sep 22 23:22:18.885 INFO [2] received reconcile message
40755 Sep 22 23:22:18.885 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40756 Sep 22 23:22:18.885 INFO [2] client ExtentFlush { repair_id: ReconciliationId(244), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40757 Sep 22 23:22:18.885 DEBG 244 Flush extent 106 with f:2 g:2
40758 Sep 22 23:22:18.885 DEBG Flush just extent 106 with f:2 and g:2
40759 Sep 22 23:22:18.886 DEBG [1] It's time to notify for 244
40760 Sep 22 23:22:18.886 INFO Completion from [1] id:244 status:true
40761 Sep 22 23:22:18.886 INFO [245/752] Repair commands completed
40762 Sep 22 23:22:18.886 INFO Pop front: ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }, state: ClientData([New, New, New]) }
40763 Sep 22 23:22:18.886 INFO Sent repair work, now wait for resp
40764 Sep 22 23:22:18.886 INFO [0] received reconcile message
40765 Sep 22 23:22:18.886 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }, state: ClientData([InProgress, New, New]) }, : downstairs
40766 Sep 22 23:22:18.886 INFO [0] client ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }
40767 Sep 22 23:22:18.886 INFO [1] received reconcile message
40768 Sep 22 23:22:18.886 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40769 Sep 22 23:22:18.886 INFO [1] client ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }
40770 Sep 22 23:22:18.886 INFO [2] received reconcile message
40771 Sep 22 23:22:18.886 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40772 Sep 22 23:22:18.886 INFO [2] client ExtentClose { repair_id: ReconciliationId(245), extent_id: 106 }
40773 Sep 22 23:22:18.886 DEBG 245 Close extent 106
40774 Sep 22 23:22:18.886 DEBG 245 Close extent 106
40775 Sep 22 23:22:18.887 DEBG 245 Close extent 106
40776 Sep 22 23:22:18.887 DEBG [2] It's time to notify for 245
40777 Sep 22 23:22:18.887 INFO Completion from [2] id:245 status:true
40778 Sep 22 23:22:18.887 INFO [246/752] Repair commands completed
40779 Sep 22 23:22:18.887 INFO Pop front: ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40780 Sep 22 23:22:18.887 INFO Sent repair work, now wait for resp
40781 Sep 22 23:22:18.887 INFO [0] received reconcile message
40782 Sep 22 23:22:18.887 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40783 Sep 22 23:22:18.887 INFO [0] client ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40784 Sep 22 23:22:18.887 INFO [0] Sending repair request ReconciliationId(246)
40785 Sep 22 23:22:18.887 INFO [1] received reconcile message
40786 Sep 22 23:22:18.887 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40787 Sep 22 23:22:18.887 INFO [1] client ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40788 Sep 22 23:22:18.887 INFO [1] No action required ReconciliationId(246)
40789 Sep 22 23:22:18.887 INFO [2] received reconcile message
40790 Sep 22 23:22:18.887 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40791 Sep 22 23:22:18.887 INFO [2] client ExtentRepair { repair_id: ReconciliationId(246), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40792 Sep 22 23:22:18.887 INFO [2] No action required ReconciliationId(246)
40793 Sep 22 23:22:18.887 DEBG 246 Repair extent 106 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40794 Sep 22 23:22:18.888 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/06A.copy"
40795 Sep 22 23:22:18.950 INFO accepted connection, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40796 Sep 22 23:22:18.951 TRCE incoming request, uri: /extent/106/files, method: GET, req_id: e3f9a519-2f8f-413f-9f62-95e6bbe8cdbc, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40797 Sep 22 23:22:18.951 INFO request completed, latency_us: 266, response_code: 200, uri: /extent/106/files, method: GET, req_id: e3f9a519-2f8f-413f-9f62-95e6bbe8cdbc, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40798 Sep 22 23:22:18.951 INFO eid:106 Found repair files: ["06A", "06A.db"]
40799 Sep 22 23:22:18.952 TRCE incoming request, uri: /newextent/106/data, method: GET, req_id: 341f5512-d632-49bf-b29d-4ee34d3de257, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40800 Sep 22 23:22:18.952 INFO request completed, latency_us: 345, response_code: 200, uri: /newextent/106/data, method: GET, req_id: 341f5512-d632-49bf-b29d-4ee34d3de257, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40801 Sep 22 23:22:18.957 TRCE incoming request, uri: /newextent/106/db, method: GET, req_id: dfc1fdfd-a3cb-4eff-84c0-712b2881708f, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40802 Sep 22 23:22:18.957 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/106/db, method: GET, req_id: dfc1fdfd-a3cb-4eff-84c0-712b2881708f, remote_addr: 127.0.0.1:49077, local_addr: 127.0.0.1:52864, task: repair
40803 Sep 22 23:22:18.958 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/06A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/06A.replace"
40804 Sep 22 23:22:18.958 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40805 Sep 22 23:22:18.960 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/06A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40806 Sep 22 23:22:18.960 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06A"
40807 Sep 22 23:22:18.960 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06A.db"
40808 Sep 22 23:22:18.960 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40809 Sep 22 23:22:18.960 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/06A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/06A.completed"
40810 Sep 22 23:22:18.960 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40811 Sep 22 23:22:18.960 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40812 Sep 22 23:22:18.960 DEBG [0] It's time to notify for 246
40813 Sep 22 23:22:18.961 INFO Completion from [0] id:246 status:true
40814 Sep 22 23:22:18.961 INFO [247/752] Repair commands completed
40815 Sep 22 23:22:18.961 INFO Pop front: ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }, state: ClientData([New, New, New]) }
40816 Sep 22 23:22:18.961 INFO Sent repair work, now wait for resp
40817 Sep 22 23:22:18.961 INFO [0] received reconcile message
40818 Sep 22 23:22:18.961 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }, state: ClientData([InProgress, New, New]) }, : downstairs
40819 Sep 22 23:22:18.961 INFO [0] client ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }
40820 Sep 22 23:22:18.961 INFO [1] received reconcile message
40821 Sep 22 23:22:18.961 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40822 Sep 22 23:22:18.961 INFO [1] client ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }
40823 Sep 22 23:22:18.961 INFO [2] received reconcile message
40824 Sep 22 23:22:18.961 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40825 Sep 22 23:22:18.961 INFO [2] client ExtentReopen { repair_id: ReconciliationId(247), extent_id: 106 }
40826 Sep 22 23:22:18.961 DEBG 247 Reopen extent 106
40827 Sep 22 23:22:18.962 DEBG 247 Reopen extent 106
40828 Sep 22 23:22:18.962 DEBG 247 Reopen extent 106
40829 Sep 22 23:22:18.963 DEBG [2] It's time to notify for 247
40830 Sep 22 23:22:18.963 INFO Completion from [2] id:247 status:true
40831 Sep 22 23:22:18.963 INFO [248/752] Repair commands completed
40832 Sep 22 23:22:18.963 INFO Pop front: ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40833 Sep 22 23:22:18.963 INFO Sent repair work, now wait for resp
40834 Sep 22 23:22:18.963 INFO [0] received reconcile message
40835 Sep 22 23:22:18.963 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40836 Sep 22 23:22:18.963 INFO [0] client ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40837 Sep 22 23:22:18.963 INFO [1] received reconcile message
40838 Sep 22 23:22:18.963 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40839 Sep 22 23:22:18.963 INFO [1] client ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40840 Sep 22 23:22:18.963 INFO [2] received reconcile message
40841 Sep 22 23:22:18.963 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40842 Sep 22 23:22:18.963 INFO [2] client ExtentFlush { repair_id: ReconciliationId(248), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40843 Sep 22 23:22:18.963 DEBG 248 Flush extent 22 with f:2 g:2
40844 Sep 22 23:22:18.963 DEBG Flush just extent 22 with f:2 and g:2
40845 Sep 22 23:22:18.963 DEBG [1] It's time to notify for 248
40846 Sep 22 23:22:18.963 INFO Completion from [1] id:248 status:true
40847 Sep 22 23:22:18.963 INFO [249/752] Repair commands completed
40848 Sep 22 23:22:18.964 INFO Pop front: ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }, state: ClientData([New, New, New]) }
40849 Sep 22 23:22:18.964 INFO Sent repair work, now wait for resp
40850 Sep 22 23:22:18.964 INFO [0] received reconcile message
40851 Sep 22 23:22:18.964 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }, state: ClientData([InProgress, New, New]) }, : downstairs
40852 Sep 22 23:22:18.964 INFO [0] client ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }
40853 Sep 22 23:22:18.964 INFO [1] received reconcile message
40854 Sep 22 23:22:18.964 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40855 Sep 22 23:22:18.964 INFO [1] client ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }
40856 Sep 22 23:22:18.964 INFO [2] received reconcile message
40857 Sep 22 23:22:18.964 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40858 Sep 22 23:22:18.964 INFO [2] client ExtentClose { repair_id: ReconciliationId(249), extent_id: 22 }
40859 Sep 22 23:22:18.964 DEBG 249 Close extent 22
40860 Sep 22 23:22:18.964 DEBG 249 Close extent 22
40861 Sep 22 23:22:18.964 DEBG 249 Close extent 22
40862 Sep 22 23:22:18.965 DEBG [2] It's time to notify for 249
40863 Sep 22 23:22:18.965 INFO Completion from [2] id:249 status:true
40864 Sep 22 23:22:18.965 INFO [250/752] Repair commands completed
40865 Sep 22 23:22:18.965 INFO Pop front: ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40866 Sep 22 23:22:18.965 INFO Sent repair work, now wait for resp
40867 Sep 22 23:22:18.965 INFO [0] received reconcile message
40868 Sep 22 23:22:18.965 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40869 Sep 22 23:22:18.965 INFO [0] client ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40870 Sep 22 23:22:18.965 INFO [0] Sending repair request ReconciliationId(250)
40871 Sep 22 23:22:18.965 INFO [1] received reconcile message
40872 Sep 22 23:22:18.965 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40873 Sep 22 23:22:18.965 INFO [1] client ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40874 Sep 22 23:22:18.965 INFO [1] No action required ReconciliationId(250)
40875 Sep 22 23:22:18.965 INFO [2] received reconcile message
40876 Sep 22 23:22:18.965 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40877 Sep 22 23:22:18.965 INFO [2] client ExtentRepair { repair_id: ReconciliationId(250), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40878 Sep 22 23:22:18.965 INFO [2] No action required ReconciliationId(250)
40879 Sep 22 23:22:18.965 DEBG 250 Repair extent 22 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40880 Sep 22 23:22:18.965 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/016.copy"
40881 Sep 22 23:22:19.031 INFO accepted connection, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40882 Sep 22 23:22:19.032 TRCE incoming request, uri: /extent/22/files, method: GET, req_id: cb1c28eb-5fd9-41cf-b6c1-502363a78462, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40883 Sep 22 23:22:19.032 INFO request completed, latency_us: 272, response_code: 200, uri: /extent/22/files, method: GET, req_id: cb1c28eb-5fd9-41cf-b6c1-502363a78462, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40884 Sep 22 23:22:19.032 INFO eid:22 Found repair files: ["016", "016.db"]
40885 Sep 22 23:22:19.032 TRCE incoming request, uri: /newextent/22/data, method: GET, req_id: 454fa309-5658-43e8-b471-3f2f5f695c08, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40886 Sep 22 23:22:19.033 INFO request completed, latency_us: 347, response_code: 200, uri: /newextent/22/data, method: GET, req_id: 454fa309-5658-43e8-b471-3f2f5f695c08, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40887 Sep 22 23:22:19.038 TRCE incoming request, uri: /newextent/22/db, method: GET, req_id: bc2779bb-8c14-4627-85c3-9fc592adbe5a, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40888 Sep 22 23:22:19.038 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/22/db, method: GET, req_id: bc2779bb-8c14-4627-85c3-9fc592adbe5a, remote_addr: 127.0.0.1:36746, local_addr: 127.0.0.1:52864, task: repair
40889 Sep 22 23:22:19.039 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/016.copy" to "/tmp/downstairs-zrMnlo6G/00/000/016.replace"
40890 Sep 22 23:22:19.039 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40891 Sep 22 23:22:19.041 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/016.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40892 Sep 22 23:22:19.041 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/016"
40893 Sep 22 23:22:19.041 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/016.db"
40894 Sep 22 23:22:19.041 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40895 Sep 22 23:22:19.041 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/016.replace" to "/tmp/downstairs-zrMnlo6G/00/000/016.completed"
40896 Sep 22 23:22:19.041 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40897 Sep 22 23:22:19.041 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40898 Sep 22 23:22:19.042 DEBG [0] It's time to notify for 250
40899 Sep 22 23:22:19.042 INFO Completion from [0] id:250 status:true
40900 Sep 22 23:22:19.042 INFO [251/752] Repair commands completed
40901 Sep 22 23:22:19.042 INFO Pop front: ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }, state: ClientData([New, New, New]) }
40902 Sep 22 23:22:19.042 INFO Sent repair work, now wait for resp
40903 Sep 22 23:22:19.042 INFO [0] received reconcile message
40904 Sep 22 23:22:19.042 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }, state: ClientData([InProgress, New, New]) }, : downstairs
40905 Sep 22 23:22:19.042 INFO [0] client ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }
40906 Sep 22 23:22:19.042 INFO [1] received reconcile message
40907 Sep 22 23:22:19.042 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40908 Sep 22 23:22:19.042 INFO [1] client ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }
40909 Sep 22 23:22:19.042 INFO [2] received reconcile message
40910 Sep 22 23:22:19.042 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40911 Sep 22 23:22:19.042 INFO [2] client ExtentReopen { repair_id: ReconciliationId(251), extent_id: 22 }
40912 Sep 22 23:22:19.042 DEBG 251 Reopen extent 22
40913 Sep 22 23:22:19.043 DEBG 251 Reopen extent 22
40914 Sep 22 23:22:19.043 DEBG 251 Reopen extent 22
40915 Sep 22 23:22:19.044 DEBG [2] It's time to notify for 251
40916 Sep 22 23:22:19.044 INFO Completion from [2] id:251 status:true
40917 Sep 22 23:22:19.044 INFO [252/752] Repair commands completed
40918 Sep 22 23:22:19.044 INFO Pop front: ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40919 Sep 22 23:22:19.044 INFO Sent repair work, now wait for resp
40920 Sep 22 23:22:19.044 INFO [0] received reconcile message
40921 Sep 22 23:22:19.044 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40922 Sep 22 23:22:19.044 INFO [0] client ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40923 Sep 22 23:22:19.044 INFO [1] received reconcile message
40924 Sep 22 23:22:19.044 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40925 Sep 22 23:22:19.044 INFO [1] client ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40926 Sep 22 23:22:19.044 INFO [2] received reconcile message
40927 Sep 22 23:22:19.044 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40928 Sep 22 23:22:19.044 INFO [2] client ExtentFlush { repair_id: ReconciliationId(252), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40929 Sep 22 23:22:19.044 DEBG 252 Flush extent 146 with f:2 g:2
40930 Sep 22 23:22:19.044 DEBG Flush just extent 146 with f:2 and g:2
40931 Sep 22 23:22:19.045 DEBG [1] It's time to notify for 252
40932 Sep 22 23:22:19.045 INFO Completion from [1] id:252 status:true
40933 Sep 22 23:22:19.045 INFO [253/752] Repair commands completed
40934 Sep 22 23:22:19.045 INFO Pop front: ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }, state: ClientData([New, New, New]) }
40935 Sep 22 23:22:19.045 INFO Sent repair work, now wait for resp
40936 Sep 22 23:22:19.045 INFO [0] received reconcile message
40937 Sep 22 23:22:19.045 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }, state: ClientData([InProgress, New, New]) }, : downstairs
40938 Sep 22 23:22:19.045 INFO [0] client ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }
40939 Sep 22 23:22:19.045 INFO [1] received reconcile message
40940 Sep 22 23:22:19.045 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40941 Sep 22 23:22:19.045 INFO [1] client ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }
40942 Sep 22 23:22:19.045 INFO [2] received reconcile message
40943 Sep 22 23:22:19.045 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40944 Sep 22 23:22:19.045 INFO [2] client ExtentClose { repair_id: ReconciliationId(253), extent_id: 146 }
40945 Sep 22 23:22:19.045 DEBG 253 Close extent 146
40946 Sep 22 23:22:19.045 DEBG 253 Close extent 146
40947 Sep 22 23:22:19.046 DEBG 253 Close extent 146
40948 Sep 22 23:22:19.046 DEBG [2] It's time to notify for 253
40949 Sep 22 23:22:19.046 INFO Completion from [2] id:253 status:true
40950 Sep 22 23:22:19.046 INFO [254/752] Repair commands completed
40951 Sep 22 23:22:19.046 INFO Pop front: ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40952 Sep 22 23:22:19.046 INFO Sent repair work, now wait for resp
40953 Sep 22 23:22:19.046 INFO [0] received reconcile message
40954 Sep 22 23:22:19.046 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40955 Sep 22 23:22:19.046 INFO [0] client ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40956 Sep 22 23:22:19.046 INFO [0] Sending repair request ReconciliationId(254)
40957 Sep 22 23:22:19.046 INFO [1] received reconcile message
40958 Sep 22 23:22:19.046 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40959 Sep 22 23:22:19.046 INFO [1] client ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40960 Sep 22 23:22:19.046 INFO [1] No action required ReconciliationId(254)
40961 Sep 22 23:22:19.046 INFO [2] received reconcile message
40962 Sep 22 23:22:19.046 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40963 Sep 22 23:22:19.046 INFO [2] client ExtentRepair { repair_id: ReconciliationId(254), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
40964 Sep 22 23:22:19.046 INFO [2] No action required ReconciliationId(254)
40965 Sep 22 23:22:19.047 DEBG 254 Repair extent 146 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
40966 Sep 22 23:22:19.047 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/092.copy"
40967 Sep 22 23:22:19.066 DEBG [0] Read AckReady 1079, : downstairs
40968 Sep 22 23:22:19.067 DEBG up_ds_listen was notified
40969 Sep 22 23:22:19.067 DEBG up_ds_listen process 1079
40970 Sep 22 23:22:19.067 DEBG [A] ack job 1079:80, : downstairs
40971 Sep 22 23:22:19.111 INFO accepted connection, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40972 Sep 22 23:22:19.111 TRCE incoming request, uri: /extent/146/files, method: GET, req_id: 4c15c710-d209-4122-ac12-fbbd11b79739, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40973 Sep 22 23:22:19.111 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/146/files, method: GET, req_id: 4c15c710-d209-4122-ac12-fbbd11b79739, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40974 Sep 22 23:22:19.112 INFO eid:146 Found repair files: ["092", "092.db"]
40975 Sep 22 23:22:19.112 TRCE incoming request, uri: /newextent/146/data, method: GET, req_id: 7756b5db-1e96-4605-9d7d-a5ba68c61622, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40976 Sep 22 23:22:19.112 INFO request completed, latency_us: 347, response_code: 200, uri: /newextent/146/data, method: GET, req_id: 7756b5db-1e96-4605-9d7d-a5ba68c61622, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40977 Sep 22 23:22:19.117 TRCE incoming request, uri: /newextent/146/db, method: GET, req_id: 637368e7-b35f-4ffa-86c7-7ad69633481d, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40978 Sep 22 23:22:19.117 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/146/db, method: GET, req_id: 637368e7-b35f-4ffa-86c7-7ad69633481d, remote_addr: 127.0.0.1:61791, local_addr: 127.0.0.1:52864, task: repair
40979 Sep 22 23:22:19.118 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/092.copy" to "/tmp/downstairs-zrMnlo6G/00/000/092.replace"
40980 Sep 22 23:22:19.118 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40981 Sep 22 23:22:19.120 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/092.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
40982 Sep 22 23:22:19.120 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/092"
40983 Sep 22 23:22:19.120 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/092.db"
40984 Sep 22 23:22:19.120 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40985 Sep 22 23:22:19.120 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/092.replace" to "/tmp/downstairs-zrMnlo6G/00/000/092.completed"
40986 Sep 22 23:22:19.120 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40987 Sep 22 23:22:19.120 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
40988 Sep 22 23:22:19.120 DEBG [0] It's time to notify for 254
40989 Sep 22 23:22:19.120 INFO Completion from [0] id:254 status:true
40990 Sep 22 23:22:19.120 INFO [255/752] Repair commands completed
40991 Sep 22 23:22:19.120 INFO Pop front: ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }, state: ClientData([New, New, New]) }
40992 Sep 22 23:22:19.121 INFO Sent repair work, now wait for resp
40993 Sep 22 23:22:19.121 INFO [0] received reconcile message
40994 Sep 22 23:22:19.121 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }, state: ClientData([InProgress, New, New]) }, : downstairs
40995 Sep 22 23:22:19.121 INFO [0] client ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }
40996 Sep 22 23:22:19.121 INFO [1] received reconcile message
40997 Sep 22 23:22:19.121 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40998 Sep 22 23:22:19.121 INFO [1] client ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }
40999 Sep 22 23:22:19.121 INFO [2] received reconcile message
41000 Sep 22 23:22:19.121 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41001 Sep 22 23:22:19.121 INFO [2] client ExtentReopen { repair_id: ReconciliationId(255), extent_id: 146 }
41002 Sep 22 23:22:19.121 DEBG 255 Reopen extent 146
41003 Sep 22 23:22:19.121 DEBG up_ds_listen checked 1 jobs, back to waiting
41004 Sep 22 23:22:19.122 DEBG 255 Reopen extent 146
41005 Sep 22 23:22:19.122 DEBG 255 Reopen extent 146
41006 Sep 22 23:22:19.123 DEBG [2] It's time to notify for 255
41007 Sep 22 23:22:19.123 INFO Completion from [2] id:255 status:true
41008 Sep 22 23:22:19.123 INFO [256/752] Repair commands completed
41009 Sep 22 23:22:19.123 INFO Pop front: ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41010 Sep 22 23:22:19.123 INFO Sent repair work, now wait for resp
41011 Sep 22 23:22:19.123 INFO [0] received reconcile message
41012 Sep 22 23:22:19.123 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41013 Sep 22 23:22:19.123 INFO [0] client ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41014 Sep 22 23:22:19.123 INFO [1] received reconcile message
41015 Sep 22 23:22:19.123 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41016 Sep 22 23:22:19.123 INFO [1] client ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41017 Sep 22 23:22:19.123 INFO [2] received reconcile message
41018 Sep 22 23:22:19.123 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41019 Sep 22 23:22:19.123 INFO [2] client ExtentFlush { repair_id: ReconciliationId(256), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41020 Sep 22 23:22:19.123 DEBG 256 Flush extent 148 with f:2 g:2
41021 Sep 22 23:22:19.123 DEBG Flush just extent 148 with f:2 and g:2
41022 Sep 22 23:22:19.123 DEBG [1] It's time to notify for 256
41023 Sep 22 23:22:19.124 INFO Completion from [1] id:256 status:true
41024 Sep 22 23:22:19.124 INFO [257/752] Repair commands completed
41025 Sep 22 23:22:19.124 INFO Pop front: ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }, state: ClientData([New, New, New]) }
41026 Sep 22 23:22:19.124 INFO Sent repair work, now wait for resp
41027 Sep 22 23:22:19.124 INFO [0] received reconcile message
41028 Sep 22 23:22:19.124 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }, state: ClientData([InProgress, New, New]) }, : downstairs
41029 Sep 22 23:22:19.124 INFO [0] client ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }
41030 Sep 22 23:22:19.124 INFO [1] received reconcile message
41031 Sep 22 23:22:19.124 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41032 Sep 22 23:22:19.124 INFO [1] client ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }
41033 Sep 22 23:22:19.124 INFO [2] received reconcile message
41034 Sep 22 23:22:19.124 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41035 Sep 22 23:22:19.124 INFO [2] client ExtentClose { repair_id: ReconciliationId(257), extent_id: 148 }
41036 Sep 22 23:22:19.124 DEBG 257 Close extent 148
41037 Sep 22 23:22:19.124 DEBG 257 Close extent 148
41038 Sep 22 23:22:19.124 DEBG Flush :1080 extent_limit None deps:[JobId(1079)] res:true f:29 g:1
41039 Sep 22 23:22:19.124 INFO [lossy] sleeping 1 second
41040 Sep 22 23:22:19.125 DEBG 257 Close extent 148
41041 Sep 22 23:22:19.125 DEBG [2] It's time to notify for 257
41042 Sep 22 23:22:19.125 INFO Completion from [2] id:257 status:true
41043 Sep 22 23:22:19.125 INFO [258/752] Repair commands completed
41044 Sep 22 23:22:19.125 INFO Pop front: ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41045 Sep 22 23:22:19.125 INFO Sent repair work, now wait for resp
41046 Sep 22 23:22:19.125 INFO [0] received reconcile message
41047 Sep 22 23:22:19.125 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41048 Sep 22 23:22:19.125 INFO [0] client ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41049 Sep 22 23:22:19.125 INFO [0] Sending repair request ReconciliationId(258)
41050 Sep 22 23:22:19.125 INFO [1] received reconcile message
41051 Sep 22 23:22:19.125 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41052 Sep 22 23:22:19.125 INFO [1] client ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41053 Sep 22 23:22:19.125 INFO [1] No action required ReconciliationId(258)
41054 Sep 22 23:22:19.125 INFO [2] received reconcile message
41055 Sep 22 23:22:19.125 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41056 Sep 22 23:22:19.125 INFO [2] client ExtentRepair { repair_id: ReconciliationId(258), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41057 Sep 22 23:22:19.125 INFO [2] No action required ReconciliationId(258)
41058 Sep 22 23:22:19.125 DEBG 258 Repair extent 148 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41059 Sep 22 23:22:19.125 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/094.copy"
41060 Sep 22 23:22:19.130 DEBG Read :1079 deps:[JobId(1078)] res:true
41061 Sep 22 23:22:19.191 INFO accepted connection, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41062 Sep 22 23:22:19.191 TRCE incoming request, uri: /extent/148/files, method: GET, req_id: d32eb627-3fa9-492a-843c-9062df76e4fc, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41063 Sep 22 23:22:19.192 INFO request completed, latency_us: 273, response_code: 200, uri: /extent/148/files, method: GET, req_id: d32eb627-3fa9-492a-843c-9062df76e4fc, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41064 Sep 22 23:22:19.192 INFO eid:148 Found repair files: ["094", "094.db"]
41065 Sep 22 23:22:19.192 TRCE incoming request, uri: /newextent/148/data, method: GET, req_id: 10d7312d-5180-4f92-9777-bdbcc0b39e5d, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41066 Sep 22 23:22:19.193 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/148/data, method: GET, req_id: 10d7312d-5180-4f92-9777-bdbcc0b39e5d, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41067 Sep 22 23:22:19.197 TRCE incoming request, uri: /newextent/148/db, method: GET, req_id: d8c668cb-ada4-42f1-a1e9-772573ae7ca7, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41068 Sep 22 23:22:19.198 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/148/db, method: GET, req_id: d8c668cb-ada4-42f1-a1e9-772573ae7ca7, remote_addr: 127.0.0.1:49081, local_addr: 127.0.0.1:52864, task: repair
41069 Sep 22 23:22:19.199 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/094.copy" to "/tmp/downstairs-zrMnlo6G/00/000/094.replace"
41070 Sep 22 23:22:19.199 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41071 Sep 22 23:22:19.200 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/094.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41072 Sep 22 23:22:19.200 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/094"
41073 Sep 22 23:22:19.200 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/094.db"
41074 Sep 22 23:22:19.200 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41075 Sep 22 23:22:19.200 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/094.replace" to "/tmp/downstairs-zrMnlo6G/00/000/094.completed"
41076 Sep 22 23:22:19.200 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41077 Sep 22 23:22:19.200 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41078 Sep 22 23:22:19.201 DEBG [0] It's time to notify for 258
41079 Sep 22 23:22:19.201 INFO Completion from [0] id:258 status:true
41080 Sep 22 23:22:19.201 INFO [259/752] Repair commands completed
41081 Sep 22 23:22:19.201 INFO Pop front: ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }, state: ClientData([New, New, New]) }
41082 Sep 22 23:22:19.201 INFO Sent repair work, now wait for resp
41083 Sep 22 23:22:19.201 INFO [0] received reconcile message
41084 Sep 22 23:22:19.201 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }, state: ClientData([InProgress, New, New]) }, : downstairs
41085 Sep 22 23:22:19.201 INFO [0] client ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }
41086 Sep 22 23:22:19.201 INFO [1] received reconcile message
41087 Sep 22 23:22:19.201 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41088 Sep 22 23:22:19.201 INFO [1] client ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }
41089 Sep 22 23:22:19.201 INFO [2] received reconcile message
41090 Sep 22 23:22:19.201 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41091 Sep 22 23:22:19.201 INFO [2] client ExtentReopen { repair_id: ReconciliationId(259), extent_id: 148 }
41092 Sep 22 23:22:19.201 DEBG 259 Reopen extent 148
41093 Sep 22 23:22:19.202 DEBG 259 Reopen extent 148
41094 Sep 22 23:22:19.202 DEBG IO Read 1081 has deps [JobId(1080)]
41095 Sep 22 23:22:19.203 DEBG 259 Reopen extent 148
41096 Sep 22 23:22:19.203 DEBG [2] It's time to notify for 259
41097 Sep 22 23:22:19.203 INFO Completion from [2] id:259 status:true
41098 Sep 22 23:22:19.203 INFO [260/752] Repair commands completed
41099 Sep 22 23:22:19.203 INFO Pop front: ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41100 Sep 22 23:22:19.203 INFO Sent repair work, now wait for resp
41101 Sep 22 23:22:19.203 INFO [0] received reconcile message
41102 Sep 22 23:22:19.203 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41103 Sep 22 23:22:19.203 INFO [0] client ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41104 Sep 22 23:22:19.203 INFO [1] received reconcile message
41105 Sep 22 23:22:19.203 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41106 Sep 22 23:22:19.203 INFO [1] client ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41107 Sep 22 23:22:19.204 INFO [2] received reconcile message
41108 Sep 22 23:22:19.204 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41109 Sep 22 23:22:19.204 INFO [2] client ExtentFlush { repair_id: ReconciliationId(260), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41110 Sep 22 23:22:19.204 DEBG 260 Flush extent 6 with f:2 g:2
41111 Sep 22 23:22:19.204 DEBG Flush just extent 6 with f:2 and g:2
41112 Sep 22 23:22:19.204 DEBG [1] It's time to notify for 260
41113 Sep 22 23:22:19.204 INFO Completion from [1] id:260 status:true
41114 Sep 22 23:22:19.204 INFO [261/752] Repair commands completed
41115 Sep 22 23:22:19.204 INFO Pop front: ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }, state: ClientData([New, New, New]) }
41116 Sep 22 23:22:19.204 INFO Sent repair work, now wait for resp
41117 Sep 22 23:22:19.204 INFO [0] received reconcile message
41118 Sep 22 23:22:19.204 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }, state: ClientData([InProgress, New, New]) }, : downstairs
41119 Sep 22 23:22:19.204 INFO [0] client ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }
41120 Sep 22 23:22:19.204 INFO [1] received reconcile message
41121 Sep 22 23:22:19.204 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41122 Sep 22 23:22:19.204 INFO [1] client ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }
41123 Sep 22 23:22:19.204 INFO [2] received reconcile message
41124 Sep 22 23:22:19.204 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41125 Sep 22 23:22:19.204 INFO [2] client ExtentClose { repair_id: ReconciliationId(261), extent_id: 6 }
41126 Sep 22 23:22:19.204 DEBG 261 Close extent 6
41127 Sep 22 23:22:19.205 DEBG 261 Close extent 6
41128 Sep 22 23:22:19.205 DEBG 261 Close extent 6
41129 Sep 22 23:22:19.205 DEBG [2] It's time to notify for 261
41130 Sep 22 23:22:19.205 INFO Completion from [2] id:261 status:true
41131 Sep 22 23:22:19.205 INFO [262/752] Repair commands completed
41132 Sep 22 23:22:19.205 INFO Pop front: ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41133 Sep 22 23:22:19.205 INFO Sent repair work, now wait for resp
41134 Sep 22 23:22:19.205 INFO [0] received reconcile message
41135 Sep 22 23:22:19.205 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41136 Sep 22 23:22:19.205 INFO [0] client ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41137 Sep 22 23:22:19.205 INFO [0] Sending repair request ReconciliationId(262)
41138 Sep 22 23:22:19.206 INFO [1] received reconcile message
41139 Sep 22 23:22:19.206 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41140 Sep 22 23:22:19.206 INFO [1] client ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41141 Sep 22 23:22:19.206 INFO [1] No action required ReconciliationId(262)
41142 Sep 22 23:22:19.206 INFO [2] received reconcile message
41143 Sep 22 23:22:19.206 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41144 Sep 22 23:22:19.206 INFO [2] client ExtentRepair { repair_id: ReconciliationId(262), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41145 Sep 22 23:22:19.206 INFO [2] No action required ReconciliationId(262)
41146 Sep 22 23:22:19.206 DEBG 262 Repair extent 6 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41147 Sep 22 23:22:19.206 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/006.copy"
41148 Sep 22 23:22:19.272 INFO accepted connection, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41149 Sep 22 23:22:19.272 TRCE incoming request, uri: /extent/6/files, method: GET, req_id: 8239d7e2-4d3a-4ed2-a570-c49bda5ed5d6, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41150 Sep 22 23:22:19.272 INFO request completed, latency_us: 245, response_code: 200, uri: /extent/6/files, method: GET, req_id: 8239d7e2-4d3a-4ed2-a570-c49bda5ed5d6, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41151 Sep 22 23:22:19.272 INFO eid:6 Found repair files: ["006", "006.db"]
41152 Sep 22 23:22:19.273 TRCE incoming request, uri: /newextent/6/data, method: GET, req_id: 1759023c-0d30-43eb-8d61-895005f53446, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41153 Sep 22 23:22:19.273 INFO request completed, latency_us: 351, response_code: 200, uri: /newextent/6/data, method: GET, req_id: 1759023c-0d30-43eb-8d61-895005f53446, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41154 Sep 22 23:22:19.278 TRCE incoming request, uri: /newextent/6/db, method: GET, req_id: 9f529b49-b35c-498c-b4b6-af7d26eba762, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41155 Sep 22 23:22:19.278 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/6/db, method: GET, req_id: 9f529b49-b35c-498c-b4b6-af7d26eba762, remote_addr: 127.0.0.1:58499, local_addr: 127.0.0.1:52864, task: repair
41156 Sep 22 23:22:19.279 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/006.copy" to "/tmp/downstairs-zrMnlo6G/00/000/006.replace"
41157 Sep 22 23:22:19.279 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41158 Sep 22 23:22:19.280 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/006.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41159 Sep 22 23:22:19.280 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/006"
41160 Sep 22 23:22:19.280 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/006.db"
41161 Sep 22 23:22:19.280 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41162 Sep 22 23:22:19.280 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/006.replace" to "/tmp/downstairs-zrMnlo6G/00/000/006.completed"
41163 Sep 22 23:22:19.280 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41164 Sep 22 23:22:19.280 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41165 Sep 22 23:22:19.281 DEBG [0] It's time to notify for 262
41166 Sep 22 23:22:19.281 INFO Completion from [0] id:262 status:true
41167 Sep 22 23:22:19.281 INFO [263/752] Repair commands completed
41168 Sep 22 23:22:19.281 INFO Pop front: ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }, state: ClientData([New, New, New]) }
41169 Sep 22 23:22:19.281 INFO Sent repair work, now wait for resp
41170 Sep 22 23:22:19.281 INFO [0] received reconcile message
41171 Sep 22 23:22:19.281 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }, state: ClientData([InProgress, New, New]) }, : downstairs
41172 Sep 22 23:22:19.281 INFO [0] client ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }
41173 Sep 22 23:22:19.281 INFO [1] received reconcile message
41174 Sep 22 23:22:19.281 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41175 Sep 22 23:22:19.281 INFO [1] client ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }
41176 Sep 22 23:22:19.281 INFO [2] received reconcile message
41177 Sep 22 23:22:19.281 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41178 Sep 22 23:22:19.281 INFO [2] client ExtentReopen { repair_id: ReconciliationId(263), extent_id: 6 }
41179 Sep 22 23:22:19.281 DEBG 263 Reopen extent 6
41180 Sep 22 23:22:19.282 DEBG 263 Reopen extent 6
41181 Sep 22 23:22:19.282 DEBG 263 Reopen extent 6
41182 Sep 22 23:22:19.283 DEBG [2] It's time to notify for 263
41183 Sep 22 23:22:19.283 INFO Completion from [2] id:263 status:true
41184 Sep 22 23:22:19.283 INFO [264/752] Repair commands completed
41185 Sep 22 23:22:19.283 INFO Pop front: ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41186 Sep 22 23:22:19.283 INFO Sent repair work, now wait for resp
41187 Sep 22 23:22:19.283 INFO [0] received reconcile message
41188 Sep 22 23:22:19.283 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41189 Sep 22 23:22:19.283 INFO [0] client ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41190 Sep 22 23:22:19.283 INFO [1] received reconcile message
41191 Sep 22 23:22:19.283 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41192 Sep 22 23:22:19.283 INFO [1] client ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41193 Sep 22 23:22:19.283 INFO [2] received reconcile message
41194 Sep 22 23:22:19.283 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41195 Sep 22 23:22:19.283 INFO [2] client ExtentFlush { repair_id: ReconciliationId(264), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41196 Sep 22 23:22:19.284 DEBG 264 Flush extent 62 with f:2 g:2
41197 Sep 22 23:22:19.284 DEBG Flush just extent 62 with f:2 and g:2
41198 Sep 22 23:22:19.284 DEBG [1] It's time to notify for 264
41199 Sep 22 23:22:19.284 INFO Completion from [1] id:264 status:true
41200 Sep 22 23:22:19.284 INFO [265/752] Repair commands completed
41201 Sep 22 23:22:19.284 INFO Pop front: ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }, state: ClientData([New, New, New]) }
41202 Sep 22 23:22:19.284 INFO Sent repair work, now wait for resp
41203 Sep 22 23:22:19.284 INFO [0] received reconcile message
41204 Sep 22 23:22:19.284 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }, state: ClientData([InProgress, New, New]) }, : downstairs
41205 Sep 22 23:22:19.284 INFO [0] client ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }
41206 Sep 22 23:22:19.284 INFO [1] received reconcile message
41207 Sep 22 23:22:19.284 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41208 Sep 22 23:22:19.284 INFO [1] client ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }
41209 Sep 22 23:22:19.284 INFO [2] received reconcile message
41210 Sep 22 23:22:19.284 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41211 Sep 22 23:22:19.284 INFO [2] client ExtentClose { repair_id: ReconciliationId(265), extent_id: 62 }
41212 Sep 22 23:22:19.284 DEBG 265 Close extent 62
41213 Sep 22 23:22:19.284 DEBG 265 Close extent 62
41214 Sep 22 23:22:19.285 DEBG 265 Close extent 62
41215 Sep 22 23:22:19.285 DEBG [2] It's time to notify for 265
41216 Sep 22 23:22:19.285 INFO Completion from [2] id:265 status:true
41217 Sep 22 23:22:19.285 INFO [266/752] Repair commands completed
41218 Sep 22 23:22:19.285 INFO Pop front: ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41219 Sep 22 23:22:19.285 INFO Sent repair work, now wait for resp
41220 Sep 22 23:22:19.285 INFO [0] received reconcile message
41221 Sep 22 23:22:19.285 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41222 Sep 22 23:22:19.285 INFO [0] client ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41223 Sep 22 23:22:19.285 INFO [0] Sending repair request ReconciliationId(266)
41224 Sep 22 23:22:19.285 INFO [1] received reconcile message
41225 Sep 22 23:22:19.285 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41226 Sep 22 23:22:19.285 INFO [1] client ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41227 Sep 22 23:22:19.285 INFO [1] No action required ReconciliationId(266)
41228 Sep 22 23:22:19.285 INFO [2] received reconcile message
41229 Sep 22 23:22:19.285 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41230 Sep 22 23:22:19.286 INFO [2] client ExtentRepair { repair_id: ReconciliationId(266), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41231 Sep 22 23:22:19.286 INFO [2] No action required ReconciliationId(266)
41232 Sep 22 23:22:19.286 DEBG 266 Repair extent 62 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41233 Sep 22 23:22:19.286 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/03E.copy"
41234 Sep 22 23:22:19.351 INFO accepted connection, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41235 Sep 22 23:22:19.352 TRCE incoming request, uri: /extent/62/files, method: GET, req_id: 0b68f468-2861-4eb7-97aa-bed6390fa852, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41236 Sep 22 23:22:19.352 INFO request completed, latency_us: 243, response_code: 200, uri: /extent/62/files, method: GET, req_id: 0b68f468-2861-4eb7-97aa-bed6390fa852, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41237 Sep 22 23:22:19.352 INFO eid:62 Found repair files: ["03E", "03E.db"]
41238 Sep 22 23:22:19.353 TRCE incoming request, uri: /newextent/62/data, method: GET, req_id: 3cd6d695-c5d6-4072-b92a-7972a0922a3e, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41239 Sep 22 23:22:19.353 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/62/data, method: GET, req_id: 3cd6d695-c5d6-4072-b92a-7972a0922a3e, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41240 Sep 22 23:22:19.358 TRCE incoming request, uri: /newextent/62/db, method: GET, req_id: 16d90d14-230b-4fdc-80e4-653a037e3557, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41241 Sep 22 23:22:19.358 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/62/db, method: GET, req_id: 16d90d14-230b-4fdc-80e4-653a037e3557, remote_addr: 127.0.0.1:41150, local_addr: 127.0.0.1:52864, task: repair
41242 Sep 22 23:22:19.359 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/03E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/03E.replace"
41243 Sep 22 23:22:19.359 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41244 Sep 22 23:22:19.360 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/03E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41245 Sep 22 23:22:19.361 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03E"
41246 Sep 22 23:22:19.361 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03E.db"
41247 Sep 22 23:22:19.361 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41248 Sep 22 23:22:19.361 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/03E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/03E.completed"
41249 Sep 22 23:22:19.361 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41250 Sep 22 23:22:19.361 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41251 Sep 22 23:22:19.361 DEBG [0] It's time to notify for 266
41252 Sep 22 23:22:19.361 INFO Completion from [0] id:266 status:true
41253 Sep 22 23:22:19.361 INFO [267/752] Repair commands completed
41254 Sep 22 23:22:19.361 INFO Pop front: ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }, state: ClientData([New, New, New]) }
41255 Sep 22 23:22:19.361 INFO Sent repair work, now wait for resp
41256 Sep 22 23:22:19.361 INFO [0] received reconcile message
41257 Sep 22 23:22:19.361 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }, state: ClientData([InProgress, New, New]) }, : downstairs
41258 Sep 22 23:22:19.361 INFO [0] client ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }
41259 Sep 22 23:22:19.361 INFO [1] received reconcile message
41260 Sep 22 23:22:19.361 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41261 Sep 22 23:22:19.361 INFO [1] client ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }
41262 Sep 22 23:22:19.361 INFO [2] received reconcile message
41263 Sep 22 23:22:19.362 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41264 Sep 22 23:22:19.362 INFO [2] client ExtentReopen { repair_id: ReconciliationId(267), extent_id: 62 }
41265 Sep 22 23:22:19.362 DEBG 267 Reopen extent 62
41266 Sep 22 23:22:19.362 DEBG 267 Reopen extent 62
41267 Sep 22 23:22:19.363 DEBG 267 Reopen extent 62
41268 Sep 22 23:22:19.364 DEBG [2] It's time to notify for 267
41269 Sep 22 23:22:19.364 INFO Completion from [2] id:267 status:true
41270 Sep 22 23:22:19.364 INFO [268/752] Repair commands completed
41271 Sep 22 23:22:19.364 INFO Pop front: ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41272 Sep 22 23:22:19.364 INFO Sent repair work, now wait for resp
41273 Sep 22 23:22:19.364 INFO [0] received reconcile message
41274 Sep 22 23:22:19.364 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41275 Sep 22 23:22:19.364 INFO [0] client ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41276 Sep 22 23:22:19.364 INFO [1] received reconcile message
41277 Sep 22 23:22:19.364 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41278 Sep 22 23:22:19.364 INFO [1] client ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41279 Sep 22 23:22:19.364 INFO [2] received reconcile message
41280 Sep 22 23:22:19.364 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41281 Sep 22 23:22:19.364 INFO [2] client ExtentFlush { repair_id: ReconciliationId(268), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41282 Sep 22 23:22:19.364 DEBG 268 Flush extent 126 with f:2 g:2
41283 Sep 22 23:22:19.364 DEBG Flush just extent 126 with f:2 and g:2
41284 Sep 22 23:22:19.364 DEBG [1] It's time to notify for 268
41285 Sep 22 23:22:19.364 INFO Completion from [1] id:268 status:true
41286 Sep 22 23:22:19.364 INFO [269/752] Repair commands completed
41287 Sep 22 23:22:19.364 INFO Pop front: ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }, state: ClientData([New, New, New]) }
41288 Sep 22 23:22:19.364 INFO Sent repair work, now wait for resp
41289 Sep 22 23:22:19.364 INFO [0] received reconcile message
41290 Sep 22 23:22:19.364 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }, state: ClientData([InProgress, New, New]) }, : downstairs
41291 Sep 22 23:22:19.364 INFO [0] client ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }
41292 Sep 22 23:22:19.364 INFO [1] received reconcile message
41293 Sep 22 23:22:19.364 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41294 Sep 22 23:22:19.364 INFO [1] client ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }
41295 Sep 22 23:22:19.364 INFO [2] received reconcile message
41296 Sep 22 23:22:19.364 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41297 Sep 22 23:22:19.364 INFO [2] client ExtentClose { repair_id: ReconciliationId(269), extent_id: 126 }
41298 Sep 22 23:22:19.365 DEBG 269 Close extent 126
41299 Sep 22 23:22:19.365 DEBG 269 Close extent 126
41300 Sep 22 23:22:19.365 DEBG 269 Close extent 126
41301 Sep 22 23:22:19.366 DEBG [2] It's time to notify for 269
41302 Sep 22 23:22:19.366 INFO Completion from [2] id:269 status:true
41303 Sep 22 23:22:19.366 INFO [270/752] Repair commands completed
41304 Sep 22 23:22:19.366 INFO Pop front: ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41305 Sep 22 23:22:19.366 INFO Sent repair work, now wait for resp
41306 Sep 22 23:22:19.366 INFO [0] received reconcile message
41307 Sep 22 23:22:19.366 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41308 Sep 22 23:22:19.366 INFO [0] client ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41309 Sep 22 23:22:19.366 INFO [0] Sending repair request ReconciliationId(270)
41310 Sep 22 23:22:19.366 INFO [1] received reconcile message
41311 Sep 22 23:22:19.366 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41312 Sep 22 23:22:19.366 INFO [1] client ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41313 Sep 22 23:22:19.366 INFO [1] No action required ReconciliationId(270)
41314 Sep 22 23:22:19.366 INFO [2] received reconcile message
41315 Sep 22 23:22:19.366 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41316 Sep 22 23:22:19.366 INFO [2] client ExtentRepair { repair_id: ReconciliationId(270), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41317 Sep 22 23:22:19.366 INFO [2] No action required ReconciliationId(270)
41318 Sep 22 23:22:19.366 DEBG 270 Repair extent 126 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41319 Sep 22 23:22:19.366 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/07E.copy"
41320 Sep 22 23:22:19.431 INFO accepted connection, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41321 Sep 22 23:22:19.431 TRCE incoming request, uri: /extent/126/files, method: GET, req_id: 90e158c7-3d63-4551-836d-ec2d05ef5d19, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41322 Sep 22 23:22:19.432 INFO request completed, latency_us: 279, response_code: 200, uri: /extent/126/files, method: GET, req_id: 90e158c7-3d63-4551-836d-ec2d05ef5d19, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41323 Sep 22 23:22:19.432 INFO eid:126 Found repair files: ["07E", "07E.db"]
41324 Sep 22 23:22:19.432 TRCE incoming request, uri: /newextent/126/data, method: GET, req_id: d5a8403b-4876-4e36-855c-d6b6a4ffe7dc, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41325 Sep 22 23:22:19.433 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/126/data, method: GET, req_id: d5a8403b-4876-4e36-855c-d6b6a4ffe7dc, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41326 Sep 22 23:22:19.438 TRCE incoming request, uri: /newextent/126/db, method: GET, req_id: bbbcfc5f-4c28-419e-82df-c4d71692afd7, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41327 Sep 22 23:22:19.438 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/126/db, method: GET, req_id: bbbcfc5f-4c28-419e-82df-c4d71692afd7, remote_addr: 127.0.0.1:53993, local_addr: 127.0.0.1:52864, task: repair
41328 Sep 22 23:22:19.439 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/07E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/07E.replace"
41329 Sep 22 23:22:19.439 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41330 Sep 22 23:22:19.440 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/07E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41331 Sep 22 23:22:19.440 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07E"
41332 Sep 22 23:22:19.440 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07E.db"
41333 Sep 22 23:22:19.441 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41334 Sep 22 23:22:19.441 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/07E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/07E.completed"
41335 Sep 22 23:22:19.441 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41336 Sep 22 23:22:19.441 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41337 Sep 22 23:22:19.441 DEBG [0] It's time to notify for 270
41338 Sep 22 23:22:19.441 INFO Completion from [0] id:270 status:true
41339 Sep 22 23:22:19.441 INFO [271/752] Repair commands completed
41340 Sep 22 23:22:19.441 INFO Pop front: ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }, state: ClientData([New, New, New]) }
41341 Sep 22 23:22:19.441 INFO Sent repair work, now wait for resp
41342 Sep 22 23:22:19.441 INFO [0] received reconcile message
41343 Sep 22 23:22:19.441 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }, state: ClientData([InProgress, New, New]) }, : downstairs
41344 Sep 22 23:22:19.441 INFO [0] client ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }
41345 Sep 22 23:22:19.441 INFO [1] received reconcile message
41346 Sep 22 23:22:19.441 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41347 Sep 22 23:22:19.441 INFO [1] client ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }
41348 Sep 22 23:22:19.441 INFO [2] received reconcile message
41349 Sep 22 23:22:19.441 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41350 Sep 22 23:22:19.441 INFO [2] client ExtentReopen { repair_id: ReconciliationId(271), extent_id: 126 }
41351 Sep 22 23:22:19.442 DEBG 271 Reopen extent 126
41352 Sep 22 23:22:19.442 DEBG 271 Reopen extent 126
41353 Sep 22 23:22:19.443 DEBG 271 Reopen extent 126
41354 Sep 22 23:22:19.443 DEBG [2] It's time to notify for 271
41355 Sep 22 23:22:19.443 INFO Completion from [2] id:271 status:true
41356 Sep 22 23:22:19.444 INFO [272/752] Repair commands completed
41357 Sep 22 23:22:19.444 INFO Pop front: ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41358 Sep 22 23:22:19.444 INFO Sent repair work, now wait for resp
41359 Sep 22 23:22:19.444 INFO [0] received reconcile message
41360 Sep 22 23:22:19.444 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41361 Sep 22 23:22:19.444 INFO [0] client ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41362 Sep 22 23:22:19.444 INFO [1] received reconcile message
41363 Sep 22 23:22:19.444 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41364 Sep 22 23:22:19.444 INFO [1] client ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41365 Sep 22 23:22:19.444 INFO [2] received reconcile message
41366 Sep 22 23:22:19.444 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41367 Sep 22 23:22:19.444 INFO [2] client ExtentFlush { repair_id: ReconciliationId(272), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41368 Sep 22 23:22:19.444 DEBG 272 Flush extent 128 with f:2 g:2
41369 Sep 22 23:22:19.444 DEBG Flush just extent 128 with f:2 and g:2
41370 Sep 22 23:22:19.444 DEBG [1] It's time to notify for 272
41371 Sep 22 23:22:19.444 INFO Completion from [1] id:272 status:true
41372 Sep 22 23:22:19.444 INFO [273/752] Repair commands completed
41373 Sep 22 23:22:19.444 INFO Pop front: ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }, state: ClientData([New, New, New]) }
41374 Sep 22 23:22:19.444 INFO Sent repair work, now wait for resp
41375 Sep 22 23:22:19.444 INFO [0] received reconcile message
41376 Sep 22 23:22:19.444 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }, state: ClientData([InProgress, New, New]) }, : downstairs
41377 Sep 22 23:22:19.444 INFO [0] client ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }
41378 Sep 22 23:22:19.444 INFO [1] received reconcile message
41379 Sep 22 23:22:19.444 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41380 Sep 22 23:22:19.444 INFO [1] client ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }
41381 Sep 22 23:22:19.444 INFO [2] received reconcile message
41382 Sep 22 23:22:19.444 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41383 Sep 22 23:22:19.444 INFO [2] client ExtentClose { repair_id: ReconciliationId(273), extent_id: 128 }
41384 Sep 22 23:22:19.445 DEBG 273 Close extent 128
41385 Sep 22 23:22:19.445 DEBG 273 Close extent 128
41386 Sep 22 23:22:19.445 DEBG 273 Close extent 128
41387 Sep 22 23:22:19.445 DEBG [2] It's time to notify for 273
41388 Sep 22 23:22:19.446 INFO Completion from [2] id:273 status:true
41389 Sep 22 23:22:19.446 INFO [274/752] Repair commands completed
41390 Sep 22 23:22:19.446 INFO Pop front: ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41391 Sep 22 23:22:19.446 INFO Sent repair work, now wait for resp
41392 Sep 22 23:22:19.446 INFO [0] received reconcile message
41393 Sep 22 23:22:19.446 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41394 Sep 22 23:22:19.446 INFO [0] client ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41395 Sep 22 23:22:19.446 INFO [0] Sending repair request ReconciliationId(274)
41396 Sep 22 23:22:19.446 INFO [1] received reconcile message
41397 Sep 22 23:22:19.446 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41398 Sep 22 23:22:19.446 INFO [1] client ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41399 Sep 22 23:22:19.446 INFO [1] No action required ReconciliationId(274)
41400 Sep 22 23:22:19.446 INFO [2] received reconcile message
41401 Sep 22 23:22:19.446 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41402 Sep 22 23:22:19.446 INFO [2] client ExtentRepair { repair_id: ReconciliationId(274), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41403 Sep 22 23:22:19.446 INFO [2] No action required ReconciliationId(274)
41404 Sep 22 23:22:19.446 DEBG 274 Repair extent 128 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41405 Sep 22 23:22:19.446 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/080.copy"
41406 Sep 22 23:22:19.509 INFO accepted connection, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41407 Sep 22 23:22:19.509 TRCE incoming request, uri: /extent/128/files, method: GET, req_id: 8f0487c8-02c7-4061-81fe-73f9be8c1301, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41408 Sep 22 23:22:19.509 INFO request completed, latency_us: 205, response_code: 200, uri: /extent/128/files, method: GET, req_id: 8f0487c8-02c7-4061-81fe-73f9be8c1301, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41409 Sep 22 23:22:19.510 INFO eid:128 Found repair files: ["080", "080.db"]
41410 Sep 22 23:22:19.510 TRCE incoming request, uri: /newextent/128/data, method: GET, req_id: 1bb08b04-43fc-493f-8838-265a061e1485, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41411 Sep 22 23:22:19.510 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/128/data, method: GET, req_id: 1bb08b04-43fc-493f-8838-265a061e1485, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41412 Sep 22 23:22:19.515 TRCE incoming request, uri: /newextent/128/db, method: GET, req_id: 35292bfb-7665-493d-84a4-8c15b437983e, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41413 Sep 22 23:22:19.515 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/128/db, method: GET, req_id: 35292bfb-7665-493d-84a4-8c15b437983e, remote_addr: 127.0.0.1:34045, local_addr: 127.0.0.1:52864, task: repair
41414 Sep 22 23:22:19.516 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/080.copy" to "/tmp/downstairs-zrMnlo6G/00/000/080.replace"
41415 Sep 22 23:22:19.516 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41416 Sep 22 23:22:19.517 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/080.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41417 Sep 22 23:22:19.517 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/080"
41418 Sep 22 23:22:19.518 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/080.db"
41419 Sep 22 23:22:19.518 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41420 Sep 22 23:22:19.518 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/080.replace" to "/tmp/downstairs-zrMnlo6G/00/000/080.completed"
41421 Sep 22 23:22:19.518 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41422 Sep 22 23:22:19.518 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41423 Sep 22 23:22:19.518 DEBG [0] It's time to notify for 274
41424 Sep 22 23:22:19.518 INFO Completion from [0] id:274 status:true
41425 Sep 22 23:22:19.518 INFO [275/752] Repair commands completed
41426 Sep 22 23:22:19.518 INFO Pop front: ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }, state: ClientData([New, New, New]) }
41427 Sep 22 23:22:19.518 INFO Sent repair work, now wait for resp
41428 Sep 22 23:22:19.518 INFO [0] received reconcile message
41429 Sep 22 23:22:19.518 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }, state: ClientData([InProgress, New, New]) }, : downstairs
41430 Sep 22 23:22:19.518 INFO [0] client ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }
41431 Sep 22 23:22:19.518 INFO [1] received reconcile message
41432 Sep 22 23:22:19.518 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41433 Sep 22 23:22:19.518 INFO [1] client ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }
41434 Sep 22 23:22:19.518 INFO [2] received reconcile message
41435 Sep 22 23:22:19.518 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41436 Sep 22 23:22:19.518 INFO [2] client ExtentReopen { repair_id: ReconciliationId(275), extent_id: 128 }
41437 Sep 22 23:22:19.518 DEBG 275 Reopen extent 128
41438 Sep 22 23:22:19.519 DEBG 275 Reopen extent 128
41439 Sep 22 23:22:19.520 DEBG 275 Reopen extent 128
41440 Sep 22 23:22:19.520 DEBG [2] It's time to notify for 275
41441 Sep 22 23:22:19.520 INFO Completion from [2] id:275 status:true
41442 Sep 22 23:22:19.520 INFO [276/752] Repair commands completed
41443 Sep 22 23:22:19.520 INFO Pop front: ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41444 Sep 22 23:22:19.520 INFO Sent repair work, now wait for resp
41445 Sep 22 23:22:19.520 INFO [0] received reconcile message
41446 Sep 22 23:22:19.520 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41447 Sep 22 23:22:19.520 INFO [0] client ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41448 Sep 22 23:22:19.520 INFO [1] received reconcile message
41449 Sep 22 23:22:19.520 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41450 Sep 22 23:22:19.520 INFO [1] client ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41451 Sep 22 23:22:19.520 INFO [2] received reconcile message
41452 Sep 22 23:22:19.520 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41453 Sep 22 23:22:19.520 INFO [2] client ExtentFlush { repair_id: ReconciliationId(276), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41454 Sep 22 23:22:19.521 DEBG 276 Flush extent 79 with f:2 g:2
41455 Sep 22 23:22:19.521 DEBG Flush just extent 79 with f:2 and g:2
41456 Sep 22 23:22:19.521 DEBG [1] It's time to notify for 276
41457 Sep 22 23:22:19.521 INFO Completion from [1] id:276 status:true
41458 Sep 22 23:22:19.521 INFO [277/752] Repair commands completed
41459 Sep 22 23:22:19.521 INFO Pop front: ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }, state: ClientData([New, New, New]) }
41460 Sep 22 23:22:19.521 INFO Sent repair work, now wait for resp
41461 Sep 22 23:22:19.521 INFO [0] received reconcile message
41462 Sep 22 23:22:19.521 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }, state: ClientData([InProgress, New, New]) }, : downstairs
41463 Sep 22 23:22:19.521 INFO [0] client ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }
41464 Sep 22 23:22:19.521 INFO [1] received reconcile message
41465 Sep 22 23:22:19.521 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41466 Sep 22 23:22:19.521 INFO [1] client ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }
41467 Sep 22 23:22:19.521 INFO [2] received reconcile message
41468 Sep 22 23:22:19.521 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41469 Sep 22 23:22:19.521 INFO [2] client ExtentClose { repair_id: ReconciliationId(277), extent_id: 79 }
41470 Sep 22 23:22:19.521 DEBG 277 Close extent 79
41471 Sep 22 23:22:19.521 DEBG 277 Close extent 79
41472 Sep 22 23:22:19.522 DEBG 277 Close extent 79
41473 Sep 22 23:22:19.522 DEBG [2] It's time to notify for 277
41474 Sep 22 23:22:19.522 INFO Completion from [2] id:277 status:true
41475 Sep 22 23:22:19.522 INFO [278/752] Repair commands completed
41476 Sep 22 23:22:19.522 INFO Pop front: ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41477 Sep 22 23:22:19.522 INFO Sent repair work, now wait for resp
41478 Sep 22 23:22:19.522 INFO [0] received reconcile message
41479 Sep 22 23:22:19.522 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41480 Sep 22 23:22:19.522 INFO [0] client ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41481 Sep 22 23:22:19.522 INFO [0] Sending repair request ReconciliationId(278)
41482 Sep 22 23:22:19.522 INFO [1] received reconcile message
41483 Sep 22 23:22:19.522 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41484 Sep 22 23:22:19.522 INFO [1] client ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41485 Sep 22 23:22:19.522 INFO [1] No action required ReconciliationId(278)
41486 Sep 22 23:22:19.522 INFO [2] received reconcile message
41487 Sep 22 23:22:19.522 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41488 Sep 22 23:22:19.523 INFO [2] client ExtentRepair { repair_id: ReconciliationId(278), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41489 Sep 22 23:22:19.523 INFO [2] No action required ReconciliationId(278)
41490 Sep 22 23:22:19.523 DEBG 278 Repair extent 79 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41491 Sep 22 23:22:19.523 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/04F.copy"
41492 Sep 22 23:22:19.554 DEBG Flush :1080 extent_limit None deps:[JobId(1079)] res:true f:29 g:1
41493 Sep 22 23:22:19.554 WARN returning error on read!
41494 Sep 22 23:22:19.554 DEBG Read :1081 deps:[JobId(1080)] res:false
41495 Sep 22 23:22:19.554 INFO [lossy] skipping 1081
41496 Sep 22 23:22:19.560 DEBG Read :1081 deps:[JobId(1080)] res:true
41497 Sep 22 23:22:19.588 INFO accepted connection, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41498 Sep 22 23:22:19.588 TRCE incoming request, uri: /extent/79/files, method: GET, req_id: 21002347-7169-4518-adb7-7fc33fe36e1a, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41499 Sep 22 23:22:19.589 INFO request completed, latency_us: 218, response_code: 200, uri: /extent/79/files, method: GET, req_id: 21002347-7169-4518-adb7-7fc33fe36e1a, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41500 Sep 22 23:22:19.589 INFO eid:79 Found repair files: ["04F", "04F.db"]
41501 Sep 22 23:22:19.589 TRCE incoming request, uri: /newextent/79/data, method: GET, req_id: 5dd0bccf-b48d-4f76-984e-c1f7cd3f45fb, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41502 Sep 22 23:22:19.589 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/79/data, method: GET, req_id: 5dd0bccf-b48d-4f76-984e-c1f7cd3f45fb, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41503 Sep 22 23:22:19.594 TRCE incoming request, uri: /newextent/79/db, method: GET, req_id: 640a7219-d062-4d35-aa7a-6b066d3adce7, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41504 Sep 22 23:22:19.594 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/79/db, method: GET, req_id: 640a7219-d062-4d35-aa7a-6b066d3adce7, remote_addr: 127.0.0.1:58831, local_addr: 127.0.0.1:52864, task: repair
41505 Sep 22 23:22:19.596 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/04F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/04F.replace"
41506 Sep 22 23:22:19.596 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41507 Sep 22 23:22:19.596 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/04F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41508 Sep 22 23:22:19.597 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04F"
41509 Sep 22 23:22:19.597 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04F.db"
41510 Sep 22 23:22:19.597 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41511 Sep 22 23:22:19.597 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/04F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/04F.completed"
41512 Sep 22 23:22:19.597 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41513 Sep 22 23:22:19.597 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41514 Sep 22 23:22:19.597 DEBG [0] It's time to notify for 278
41515 Sep 22 23:22:19.597 INFO Completion from [0] id:278 status:true
41516 Sep 22 23:22:19.597 INFO [279/752] Repair commands completed
41517 Sep 22 23:22:19.597 INFO Pop front: ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }, state: ClientData([New, New, New]) }
41518 Sep 22 23:22:19.597 INFO Sent repair work, now wait for resp
41519 Sep 22 23:22:19.597 INFO [0] received reconcile message
41520 Sep 22 23:22:19.597 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }, state: ClientData([InProgress, New, New]) }, : downstairs
41521 Sep 22 23:22:19.597 INFO [0] client ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }
41522 Sep 22 23:22:19.597 INFO [1] received reconcile message
41523 Sep 22 23:22:19.597 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41524 Sep 22 23:22:19.597 INFO [1] client ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }
41525 Sep 22 23:22:19.598 INFO [2] received reconcile message
41526 Sep 22 23:22:19.598 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41527 Sep 22 23:22:19.598 INFO [2] client ExtentReopen { repair_id: ReconciliationId(279), extent_id: 79 }
41528 Sep 22 23:22:19.598 DEBG 279 Reopen extent 79
41529 Sep 22 23:22:19.598 DEBG 279 Reopen extent 79
41530 Sep 22 23:22:19.599 DEBG 279 Reopen extent 79
41531 Sep 22 23:22:19.599 DEBG [2] It's time to notify for 279
41532 Sep 22 23:22:19.600 INFO Completion from [2] id:279 status:true
41533 Sep 22 23:22:19.600 INFO [280/752] Repair commands completed
41534 Sep 22 23:22:19.600 INFO Pop front: ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41535 Sep 22 23:22:19.600 INFO Sent repair work, now wait for resp
41536 Sep 22 23:22:19.600 INFO [0] received reconcile message
41537 Sep 22 23:22:19.600 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41538 Sep 22 23:22:19.600 INFO [0] client ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41539 Sep 22 23:22:19.600 INFO [1] received reconcile message
41540 Sep 22 23:22:19.600 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41541 Sep 22 23:22:19.600 INFO [1] client ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41542 Sep 22 23:22:19.600 INFO [2] received reconcile message
41543 Sep 22 23:22:19.600 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41544 Sep 22 23:22:19.600 INFO [2] client ExtentFlush { repair_id: ReconciliationId(280), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41545 Sep 22 23:22:19.600 DEBG 280 Flush extent 144 with f:2 g:2
41546 Sep 22 23:22:19.600 DEBG Flush just extent 144 with f:2 and g:2
41547 Sep 22 23:22:19.600 DEBG [1] It's time to notify for 280
41548 Sep 22 23:22:19.600 INFO Completion from [1] id:280 status:true
41549 Sep 22 23:22:19.600 INFO [281/752] Repair commands completed
41550 Sep 22 23:22:19.600 INFO Pop front: ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }, state: ClientData([New, New, New]) }
41551 Sep 22 23:22:19.600 INFO Sent repair work, now wait for resp
41552 Sep 22 23:22:19.600 INFO [0] received reconcile message
41553 Sep 22 23:22:19.600 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }, state: ClientData([InProgress, New, New]) }, : downstairs
41554 Sep 22 23:22:19.600 INFO [0] client ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }
41555 Sep 22 23:22:19.600 INFO [1] received reconcile message
41556 Sep 22 23:22:19.600 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41557 Sep 22 23:22:19.600 INFO [1] client ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }
41558 Sep 22 23:22:19.600 INFO [2] received reconcile message
41559 Sep 22 23:22:19.600 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41560 Sep 22 23:22:19.600 INFO [2] client ExtentClose { repair_id: ReconciliationId(281), extent_id: 144 }
41561 Sep 22 23:22:19.601 DEBG 281 Close extent 144
41562 Sep 22 23:22:19.601 DEBG 281 Close extent 144
41563 Sep 22 23:22:19.601 DEBG 281 Close extent 144
41564 Sep 22 23:22:19.601 DEBG [2] It's time to notify for 281
41565 Sep 22 23:22:19.602 INFO Completion from [2] id:281 status:true
41566 Sep 22 23:22:19.602 INFO [282/752] Repair commands completed
41567 Sep 22 23:22:19.602 INFO Pop front: ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41568 Sep 22 23:22:19.602 INFO Sent repair work, now wait for resp
41569 Sep 22 23:22:19.602 INFO [0] received reconcile message
41570 Sep 22 23:22:19.602 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41571 Sep 22 23:22:19.602 INFO [0] client ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41572 Sep 22 23:22:19.602 INFO [0] Sending repair request ReconciliationId(282)
41573 Sep 22 23:22:19.602 INFO [1] received reconcile message
41574 Sep 22 23:22:19.602 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41575 Sep 22 23:22:19.602 INFO [1] client ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41576 Sep 22 23:22:19.602 INFO [1] No action required ReconciliationId(282)
41577 Sep 22 23:22:19.602 INFO [2] received reconcile message
41578 Sep 22 23:22:19.602 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41579 Sep 22 23:22:19.602 INFO [2] client ExtentRepair { repair_id: ReconciliationId(282), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41580 Sep 22 23:22:19.602 INFO [2] No action required ReconciliationId(282)
41581 Sep 22 23:22:19.602 DEBG 282 Repair extent 144 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41582 Sep 22 23:22:19.602 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/090.copy"
41583 Sep 22 23:22:19.667 INFO accepted connection, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41584 Sep 22 23:22:19.668 TRCE incoming request, uri: /extent/144/files, method: GET, req_id: f2d721e2-54aa-4595-8ead-eec597fb89d2, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41585 Sep 22 23:22:19.668 INFO request completed, latency_us: 269, response_code: 200, uri: /extent/144/files, method: GET, req_id: f2d721e2-54aa-4595-8ead-eec597fb89d2, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41586 Sep 22 23:22:19.668 INFO eid:144 Found repair files: ["090", "090.db"]
41587 Sep 22 23:22:19.669 TRCE incoming request, uri: /newextent/144/data, method: GET, req_id: 0d4b3c41-c732-4673-a782-763384d3e8fb, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41588 Sep 22 23:22:19.669 INFO request completed, latency_us: 346, response_code: 200, uri: /newextent/144/data, method: GET, req_id: 0d4b3c41-c732-4673-a782-763384d3e8fb, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41589 Sep 22 23:22:19.674 TRCE incoming request, uri: /newextent/144/db, method: GET, req_id: 566c0aaf-33e1-4e2b-b6d6-4b5e611e224a, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41590 Sep 22 23:22:19.674 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/144/db, method: GET, req_id: 566c0aaf-33e1-4e2b-b6d6-4b5e611e224a, remote_addr: 127.0.0.1:43851, local_addr: 127.0.0.1:52864, task: repair
41591 Sep 22 23:22:19.675 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/090.copy" to "/tmp/downstairs-zrMnlo6G/00/000/090.replace"
41592 Sep 22 23:22:19.675 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41593 Sep 22 23:22:19.676 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/090.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41594 Sep 22 23:22:19.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/090"
41595 Sep 22 23:22:19.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/090.db"
41596 Sep 22 23:22:19.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41597 Sep 22 23:22:19.677 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/090.replace" to "/tmp/downstairs-zrMnlo6G/00/000/090.completed"
41598 Sep 22 23:22:19.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41599 Sep 22 23:22:19.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41600 Sep 22 23:22:19.677 DEBG [0] It's time to notify for 282
41601 Sep 22 23:22:19.677 INFO Completion from [0] id:282 status:true
41602 Sep 22 23:22:19.677 INFO [283/752] Repair commands completed
41603 Sep 22 23:22:19.677 INFO Pop front: ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }, state: ClientData([New, New, New]) }
41604 Sep 22 23:22:19.677 INFO Sent repair work, now wait for resp
41605 Sep 22 23:22:19.677 INFO [0] received reconcile message
41606 Sep 22 23:22:19.677 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }, state: ClientData([InProgress, New, New]) }, : downstairs
41607 Sep 22 23:22:19.677 INFO [0] client ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }
41608 Sep 22 23:22:19.677 INFO [1] received reconcile message
41609 Sep 22 23:22:19.677 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41610 Sep 22 23:22:19.678 INFO [1] client ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }
41611 Sep 22 23:22:19.678 INFO [2] received reconcile message
41612 Sep 22 23:22:19.678 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41613 Sep 22 23:22:19.678 INFO [2] client ExtentReopen { repair_id: ReconciliationId(283), extent_id: 144 }
41614 Sep 22 23:22:19.678 DEBG 283 Reopen extent 144
41615 Sep 22 23:22:19.679 DEBG 283 Reopen extent 144
41616 Sep 22 23:22:19.679 DEBG 283 Reopen extent 144
41617 Sep 22 23:22:19.680 DEBG [2] It's time to notify for 283
41618 Sep 22 23:22:19.680 INFO Completion from [2] id:283 status:true
41619 Sep 22 23:22:19.680 INFO [284/752] Repair commands completed
41620 Sep 22 23:22:19.680 INFO Pop front: ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41621 Sep 22 23:22:19.680 INFO Sent repair work, now wait for resp
41622 Sep 22 23:22:19.680 INFO [0] received reconcile message
41623 Sep 22 23:22:19.680 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41624 Sep 22 23:22:19.680 INFO [0] client ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41625 Sep 22 23:22:19.680 INFO [1] received reconcile message
41626 Sep 22 23:22:19.680 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41627 Sep 22 23:22:19.680 INFO [1] client ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41628 Sep 22 23:22:19.680 INFO [2] received reconcile message
41629 Sep 22 23:22:19.680 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41630 Sep 22 23:22:19.680 INFO [2] client ExtentFlush { repair_id: ReconciliationId(284), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41631 Sep 22 23:22:19.680 DEBG 284 Flush extent 160 with f:2 g:2
41632 Sep 22 23:22:19.680 DEBG Flush just extent 160 with f:2 and g:2
41633 Sep 22 23:22:19.680 DEBG [1] It's time to notify for 284
41634 Sep 22 23:22:19.680 INFO Completion from [1] id:284 status:true
41635 Sep 22 23:22:19.680 INFO [285/752] Repair commands completed
41636 Sep 22 23:22:19.680 INFO Pop front: ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }, state: ClientData([New, New, New]) }
41637 Sep 22 23:22:19.680 INFO Sent repair work, now wait for resp
41638 Sep 22 23:22:19.680 INFO [0] received reconcile message
41639 Sep 22 23:22:19.680 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }, state: ClientData([InProgress, New, New]) }, : downstairs
41640 Sep 22 23:22:19.680 INFO [0] client ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }
41641 Sep 22 23:22:19.680 INFO [1] received reconcile message
41642 Sep 22 23:22:19.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41643 Sep 22 23:22:19.681 INFO [1] client ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }
41644 Sep 22 23:22:19.681 INFO [2] received reconcile message
41645 Sep 22 23:22:19.681 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41646 Sep 22 23:22:19.681 INFO [2] client ExtentClose { repair_id: ReconciliationId(285), extent_id: 160 }
41647 Sep 22 23:22:19.681 DEBG 285 Close extent 160
41648 Sep 22 23:22:19.681 DEBG 285 Close extent 160
41649 Sep 22 23:22:19.681 DEBG 285 Close extent 160
41650 Sep 22 23:22:19.682 DEBG [2] It's time to notify for 285
41651 Sep 22 23:22:19.682 INFO Completion from [2] id:285 status:true
41652 Sep 22 23:22:19.682 INFO [286/752] Repair commands completed
41653 Sep 22 23:22:19.682 INFO Pop front: ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41654 Sep 22 23:22:19.682 INFO Sent repair work, now wait for resp
41655 Sep 22 23:22:19.682 INFO [0] received reconcile message
41656 Sep 22 23:22:19.682 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41657 Sep 22 23:22:19.682 INFO [0] client ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41658 Sep 22 23:22:19.682 INFO [0] Sending repair request ReconciliationId(286)
41659 Sep 22 23:22:19.682 INFO [1] received reconcile message
41660 Sep 22 23:22:19.682 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41661 Sep 22 23:22:19.682 INFO [1] client ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41662 Sep 22 23:22:19.682 INFO [1] No action required ReconciliationId(286)
41663 Sep 22 23:22:19.682 INFO [2] received reconcile message
41664 Sep 22 23:22:19.682 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41665 Sep 22 23:22:19.682 INFO [2] client ExtentRepair { repair_id: ReconciliationId(286), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41666 Sep 22 23:22:19.682 INFO [2] No action required ReconciliationId(286)
41667 Sep 22 23:22:19.682 DEBG 286 Repair extent 160 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41668 Sep 22 23:22:19.682 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A0.copy"
41669 Sep 22 23:22:19.747 INFO accepted connection, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41670 Sep 22 23:22:19.747 TRCE incoming request, uri: /extent/160/files, method: GET, req_id: ea967da3-493c-4d19-9205-0bf359070c4a, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41671 Sep 22 23:22:19.747 INFO request completed, latency_us: 228, response_code: 200, uri: /extent/160/files, method: GET, req_id: ea967da3-493c-4d19-9205-0bf359070c4a, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41672 Sep 22 23:22:19.748 INFO eid:160 Found repair files: ["0A0", "0A0.db"]
41673 Sep 22 23:22:19.748 TRCE incoming request, uri: /newextent/160/data, method: GET, req_id: 3eb0cf72-349e-4474-bae4-6852bddfc97f, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41674 Sep 22 23:22:19.748 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/160/data, method: GET, req_id: 3eb0cf72-349e-4474-bae4-6852bddfc97f, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41675 Sep 22 23:22:19.753 TRCE incoming request, uri: /newextent/160/db, method: GET, req_id: b8b8aaf1-33eb-4aa9-a42c-1f6ebb450da5, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41676 Sep 22 23:22:19.753 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/160/db, method: GET, req_id: b8b8aaf1-33eb-4aa9-a42c-1f6ebb450da5, remote_addr: 127.0.0.1:39291, local_addr: 127.0.0.1:52864, task: repair
41677 Sep 22 23:22:19.754 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A0.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A0.replace"
41678 Sep 22 23:22:19.754 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41679 Sep 22 23:22:19.755 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A0.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41680 Sep 22 23:22:19.755 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A0"
41681 Sep 22 23:22:19.755 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A0.db"
41682 Sep 22 23:22:19.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41683 Sep 22 23:22:19.756 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A0.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A0.completed"
41684 Sep 22 23:22:19.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41685 Sep 22 23:22:19.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41686 Sep 22 23:22:19.756 DEBG [0] It's time to notify for 286
41687 Sep 22 23:22:19.756 INFO Completion from [0] id:286 status:true
41688 Sep 22 23:22:19.756 INFO [287/752] Repair commands completed
41689 Sep 22 23:22:19.756 INFO Pop front: ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }, state: ClientData([New, New, New]) }
41690 Sep 22 23:22:19.756 INFO Sent repair work, now wait for resp
41691 Sep 22 23:22:19.756 INFO [0] received reconcile message
41692 Sep 22 23:22:19.756 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }, state: ClientData([InProgress, New, New]) }, : downstairs
41693 Sep 22 23:22:19.756 INFO [0] client ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }
41694 Sep 22 23:22:19.756 INFO [1] received reconcile message
41695 Sep 22 23:22:19.756 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41696 Sep 22 23:22:19.756 INFO [1] client ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }
41697 Sep 22 23:22:19.756 INFO [2] received reconcile message
41698 Sep 22 23:22:19.756 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41699 Sep 22 23:22:19.756 INFO [2] client ExtentReopen { repair_id: ReconciliationId(287), extent_id: 160 }
41700 Sep 22 23:22:19.756 DEBG 287 Reopen extent 160
41701 Sep 22 23:22:19.757 DEBG 287 Reopen extent 160
41702 Sep 22 23:22:19.758 DEBG 287 Reopen extent 160
41703 Sep 22 23:22:19.758 DEBG [2] It's time to notify for 287
41704 Sep 22 23:22:19.758 INFO Completion from [2] id:287 status:true
41705 Sep 22 23:22:19.758 INFO [288/752] Repair commands completed
41706 Sep 22 23:22:19.758 INFO Pop front: ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41707 Sep 22 23:22:19.758 INFO Sent repair work, now wait for resp
41708 Sep 22 23:22:19.758 INFO [0] received reconcile message
41709 Sep 22 23:22:19.758 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41710 Sep 22 23:22:19.758 INFO [0] client ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41711 Sep 22 23:22:19.758 INFO [1] received reconcile message
41712 Sep 22 23:22:19.758 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41713 Sep 22 23:22:19.758 INFO [1] client ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41714 Sep 22 23:22:19.758 INFO [2] received reconcile message
41715 Sep 22 23:22:19.759 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41716 Sep 22 23:22:19.759 INFO [2] client ExtentFlush { repair_id: ReconciliationId(288), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41717 Sep 22 23:22:19.759 DEBG 288 Flush extent 133 with f:2 g:2
41718 Sep 22 23:22:19.759 DEBG Flush just extent 133 with f:2 and g:2
41719 Sep 22 23:22:19.759 DEBG [1] It's time to notify for 288
41720 Sep 22 23:22:19.759 INFO Completion from [1] id:288 status:true
41721 Sep 22 23:22:19.759 INFO [289/752] Repair commands completed
41722 Sep 22 23:22:19.759 INFO Pop front: ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }, state: ClientData([New, New, New]) }
41723 Sep 22 23:22:19.759 INFO Sent repair work, now wait for resp
41724 Sep 22 23:22:19.759 INFO [0] received reconcile message
41725 Sep 22 23:22:19.759 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }, state: ClientData([InProgress, New, New]) }, : downstairs
41726 Sep 22 23:22:19.759 INFO [0] client ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }
41727 Sep 22 23:22:19.759 INFO [1] received reconcile message
41728 Sep 22 23:22:19.759 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41729 Sep 22 23:22:19.759 INFO [1] client ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }
41730 Sep 22 23:22:19.759 INFO [2] received reconcile message
41731 Sep 22 23:22:19.759 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41732 Sep 22 23:22:19.759 INFO [2] client ExtentClose { repair_id: ReconciliationId(289), extent_id: 133 }
41733 Sep 22 23:22:19.759 DEBG 289 Close extent 133
41734 Sep 22 23:22:19.759 DEBG 289 Close extent 133
41735 Sep 22 23:22:19.760 DEBG 289 Close extent 133
41736 Sep 22 23:22:19.760 DEBG [2] It's time to notify for 289
41737 Sep 22 23:22:19.760 INFO Completion from [2] id:289 status:true
41738 Sep 22 23:22:19.760 INFO [290/752] Repair commands completed
41739 Sep 22 23:22:19.760 INFO Pop front: ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41740 Sep 22 23:22:19.760 INFO Sent repair work, now wait for resp
41741 Sep 22 23:22:19.760 INFO [0] received reconcile message
41742 Sep 22 23:22:19.760 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41743 Sep 22 23:22:19.760 INFO [0] client ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41744 Sep 22 23:22:19.760 INFO [0] Sending repair request ReconciliationId(290)
41745 Sep 22 23:22:19.760 INFO [1] received reconcile message
41746 Sep 22 23:22:19.761 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41747 Sep 22 23:22:19.761 INFO [1] client ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41748 Sep 22 23:22:19.761 INFO [1] No action required ReconciliationId(290)
41749 Sep 22 23:22:19.761 INFO [2] received reconcile message
41750 Sep 22 23:22:19.761 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41751 Sep 22 23:22:19.761 INFO [2] client ExtentRepair { repair_id: ReconciliationId(290), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41752 Sep 22 23:22:19.761 INFO [2] No action required ReconciliationId(290)
41753 Sep 22 23:22:19.761 DEBG 290 Repair extent 133 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41754 Sep 22 23:22:19.761 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/085.copy"
41755 Sep 22 23:22:19.826 INFO accepted connection, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41756 Sep 22 23:22:19.826 TRCE incoming request, uri: /extent/133/files, method: GET, req_id: 31f2709e-fd5e-41fa-b695-fb7b0a2368c3, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41757 Sep 22 23:22:19.827 INFO request completed, latency_us: 280, response_code: 200, uri: /extent/133/files, method: GET, req_id: 31f2709e-fd5e-41fa-b695-fb7b0a2368c3, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41758 Sep 22 23:22:19.827 INFO eid:133 Found repair files: ["085", "085.db"]
41759 Sep 22 23:22:19.827 TRCE incoming request, uri: /newextent/133/data, method: GET, req_id: 2d9ada90-b46a-48ce-a439-431ced8e6874, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41760 Sep 22 23:22:19.828 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/133/data, method: GET, req_id: 2d9ada90-b46a-48ce-a439-431ced8e6874, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41761 Sep 22 23:22:19.833 TRCE incoming request, uri: /newextent/133/db, method: GET, req_id: a557e406-b269-4b71-ad24-7c069cefcb9e, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41762 Sep 22 23:22:19.833 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/133/db, method: GET, req_id: a557e406-b269-4b71-ad24-7c069cefcb9e, remote_addr: 127.0.0.1:53931, local_addr: 127.0.0.1:52864, task: repair
41763 Sep 22 23:22:19.834 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/085.copy" to "/tmp/downstairs-zrMnlo6G/00/000/085.replace"
41764 Sep 22 23:22:19.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41765 Sep 22 23:22:19.835 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/085.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41766 Sep 22 23:22:19.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/085"
41767 Sep 22 23:22:19.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/085.db"
41768 Sep 22 23:22:19.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41769 Sep 22 23:22:19.836 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/085.replace" to "/tmp/downstairs-zrMnlo6G/00/000/085.completed"
41770 Sep 22 23:22:19.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41771 Sep 22 23:22:19.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41772 Sep 22 23:22:19.836 DEBG [0] It's time to notify for 290
41773 Sep 22 23:22:19.836 INFO Completion from [0] id:290 status:true
41774 Sep 22 23:22:19.836 INFO [291/752] Repair commands completed
41775 Sep 22 23:22:19.836 INFO Pop front: ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }, state: ClientData([New, New, New]) }
41776 Sep 22 23:22:19.836 INFO Sent repair work, now wait for resp
41777 Sep 22 23:22:19.836 INFO [0] received reconcile message
41778 Sep 22 23:22:19.836 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }, state: ClientData([InProgress, New, New]) }, : downstairs
41779 Sep 22 23:22:19.837 INFO [0] client ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }
41780 Sep 22 23:22:19.837 INFO [1] received reconcile message
41781 Sep 22 23:22:19.837 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41782 Sep 22 23:22:19.837 INFO [1] client ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }
41783 Sep 22 23:22:19.837 INFO [2] received reconcile message
41784 Sep 22 23:22:19.837 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41785 Sep 22 23:22:19.837 INFO [2] client ExtentReopen { repair_id: ReconciliationId(291), extent_id: 133 }
41786 Sep 22 23:22:19.837 DEBG 291 Reopen extent 133
41787 Sep 22 23:22:19.838 DEBG 291 Reopen extent 133
41788 Sep 22 23:22:19.838 DEBG 291 Reopen extent 133
41789 Sep 22 23:22:19.839 DEBG [2] It's time to notify for 291
41790 Sep 22 23:22:19.839 INFO Completion from [2] id:291 status:true
41791 Sep 22 23:22:19.839 INFO [292/752] Repair commands completed
41792 Sep 22 23:22:19.839 INFO Pop front: ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41793 Sep 22 23:22:19.839 INFO Sent repair work, now wait for resp
41794 Sep 22 23:22:19.839 INFO [0] received reconcile message
41795 Sep 22 23:22:19.839 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41796 Sep 22 23:22:19.839 INFO [0] client ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41797 Sep 22 23:22:19.839 INFO [1] received reconcile message
41798 Sep 22 23:22:19.839 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41799 Sep 22 23:22:19.839 INFO [1] client ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41800 Sep 22 23:22:19.839 INFO [2] received reconcile message
41801 Sep 22 23:22:19.839 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41802 Sep 22 23:22:19.839 INFO [2] client ExtentFlush { repair_id: ReconciliationId(292), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41803 Sep 22 23:22:19.839 DEBG 292 Flush extent 109 with f:2 g:2
41804 Sep 22 23:22:19.839 DEBG Flush just extent 109 with f:2 and g:2
41805 Sep 22 23:22:19.839 DEBG [1] It's time to notify for 292
41806 Sep 22 23:22:19.839 INFO Completion from [1] id:292 status:true
41807 Sep 22 23:22:19.839 INFO [293/752] Repair commands completed
41808 Sep 22 23:22:19.839 INFO Pop front: ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }, state: ClientData([New, New, New]) }
41809 Sep 22 23:22:19.840 INFO Sent repair work, now wait for resp
41810 Sep 22 23:22:19.840 INFO [0] received reconcile message
41811 Sep 22 23:22:19.840 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }, state: ClientData([InProgress, New, New]) }, : downstairs
41812 Sep 22 23:22:19.840 INFO [0] client ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }
41813 Sep 22 23:22:19.840 INFO [1] received reconcile message
41814 Sep 22 23:22:19.840 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41815 Sep 22 23:22:19.840 INFO [1] client ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }
41816 Sep 22 23:22:19.840 INFO [2] received reconcile message
41817 Sep 22 23:22:19.840 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41818 Sep 22 23:22:19.840 INFO [2] client ExtentClose { repair_id: ReconciliationId(293), extent_id: 109 }
41819 Sep 22 23:22:19.840 DEBG 293 Close extent 109
41820 Sep 22 23:22:19.840 DEBG 293 Close extent 109
41821 Sep 22 23:22:19.840 DEBG 293 Close extent 109
41822 Sep 22 23:22:19.841 DEBG [2] It's time to notify for 293
41823 Sep 22 23:22:19.841 INFO Completion from [2] id:293 status:true
41824 Sep 22 23:22:19.841 INFO [294/752] Repair commands completed
41825 Sep 22 23:22:19.841 INFO Pop front: ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41826 Sep 22 23:22:19.841 INFO Sent repair work, now wait for resp
41827 Sep 22 23:22:19.841 INFO [0] received reconcile message
41828 Sep 22 23:22:19.841 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41829 Sep 22 23:22:19.841 INFO [0] client ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41830 Sep 22 23:22:19.841 INFO [0] Sending repair request ReconciliationId(294)
41831 Sep 22 23:22:19.841 INFO [1] received reconcile message
41832 Sep 22 23:22:19.841 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41833 Sep 22 23:22:19.841 INFO [1] client ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41834 Sep 22 23:22:19.841 INFO [1] No action required ReconciliationId(294)
41835 Sep 22 23:22:19.841 INFO [2] received reconcile message
41836 Sep 22 23:22:19.841 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41837 Sep 22 23:22:19.841 INFO [2] client ExtentRepair { repair_id: ReconciliationId(294), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41838 Sep 22 23:22:19.841 INFO [2] No action required ReconciliationId(294)
41839 Sep 22 23:22:19.841 DEBG 294 Repair extent 109 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41840 Sep 22 23:22:19.841 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/06D.copy"
41841 Sep 22 23:22:19.907 INFO accepted connection, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41842 Sep 22 23:22:19.907 TRCE incoming request, uri: /extent/109/files, method: GET, req_id: cab63320-c4de-4799-9ae4-e02afc363b39, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41843 Sep 22 23:22:19.908 INFO request completed, latency_us: 271, response_code: 200, uri: /extent/109/files, method: GET, req_id: cab63320-c4de-4799-9ae4-e02afc363b39, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41844 Sep 22 23:22:19.908 INFO eid:109 Found repair files: ["06D", "06D.db"]
41845 Sep 22 23:22:19.908 TRCE incoming request, uri: /newextent/109/data, method: GET, req_id: fe230adc-3caf-4113-885b-7d993f337372, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41846 Sep 22 23:22:19.909 INFO request completed, latency_us: 362, response_code: 200, uri: /newextent/109/data, method: GET, req_id: fe230adc-3caf-4113-885b-7d993f337372, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41847 Sep 22 23:22:19.913 TRCE incoming request, uri: /newextent/109/db, method: GET, req_id: 2f8650df-e693-4ba3-9af2-b043a4160cbd, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41848 Sep 22 23:22:19.913 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/109/db, method: GET, req_id: 2f8650df-e693-4ba3-9af2-b043a4160cbd, remote_addr: 127.0.0.1:58922, local_addr: 127.0.0.1:52864, task: repair
41849 Sep 22 23:22:19.915 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/06D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/06D.replace"
41850 Sep 22 23:22:19.915 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41851 Sep 22 23:22:19.916 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/06D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41852 Sep 22 23:22:19.916 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06D"
41853 Sep 22 23:22:19.916 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06D.db"
41854 Sep 22 23:22:19.916 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41855 Sep 22 23:22:19.916 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/06D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/06D.completed"
41856 Sep 22 23:22:19.916 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41857 Sep 22 23:22:19.916 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41858 Sep 22 23:22:19.916 DEBG [0] It's time to notify for 294
41859 Sep 22 23:22:19.916 INFO Completion from [0] id:294 status:true
41860 Sep 22 23:22:19.916 INFO [295/752] Repair commands completed
41861 Sep 22 23:22:19.917 INFO Pop front: ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }, state: ClientData([New, New, New]) }
41862 Sep 22 23:22:19.917 INFO Sent repair work, now wait for resp
41863 Sep 22 23:22:19.917 INFO [0] received reconcile message
41864 Sep 22 23:22:19.917 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }, state: ClientData([InProgress, New, New]) }, : downstairs
41865 Sep 22 23:22:19.917 INFO [0] client ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }
41866 Sep 22 23:22:19.917 INFO [1] received reconcile message
41867 Sep 22 23:22:19.917 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41868 Sep 22 23:22:19.917 INFO [1] client ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }
41869 Sep 22 23:22:19.917 INFO [2] received reconcile message
41870 Sep 22 23:22:19.917 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41871 Sep 22 23:22:19.917 INFO [2] client ExtentReopen { repair_id: ReconciliationId(295), extent_id: 109 }
41872 Sep 22 23:22:19.917 DEBG 295 Reopen extent 109
41873 Sep 22 23:22:19.918 DEBG 295 Reopen extent 109
41874 Sep 22 23:22:19.918 DEBG 295 Reopen extent 109
41875 Sep 22 23:22:19.919 DEBG [2] It's time to notify for 295
41876 Sep 22 23:22:19.919 INFO Completion from [2] id:295 status:true
41877 Sep 22 23:22:19.919 INFO [296/752] Repair commands completed
41878 Sep 22 23:22:19.919 INFO Pop front: ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41879 Sep 22 23:22:19.919 INFO Sent repair work, now wait for resp
41880 Sep 22 23:22:19.919 INFO [0] received reconcile message
41881 Sep 22 23:22:19.919 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41882 Sep 22 23:22:19.919 INFO [0] client ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41883 Sep 22 23:22:19.919 INFO [1] received reconcile message
41884 Sep 22 23:22:19.919 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41885 Sep 22 23:22:19.919 INFO [1] client ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41886 Sep 22 23:22:19.919 INFO [2] received reconcile message
41887 Sep 22 23:22:19.919 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41888 Sep 22 23:22:19.919 INFO [2] client ExtentFlush { repair_id: ReconciliationId(296), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41889 Sep 22 23:22:19.919 DEBG 296 Flush extent 169 with f:2 g:2
41890 Sep 22 23:22:19.919 DEBG Flush just extent 169 with f:2 and g:2
41891 Sep 22 23:22:19.919 DEBG [1] It's time to notify for 296
41892 Sep 22 23:22:19.920 INFO Completion from [1] id:296 status:true
41893 Sep 22 23:22:19.920 INFO [297/752] Repair commands completed
41894 Sep 22 23:22:19.920 INFO Pop front: ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }, state: ClientData([New, New, New]) }
41895 Sep 22 23:22:19.920 INFO Sent repair work, now wait for resp
41896 Sep 22 23:22:19.920 INFO [0] received reconcile message
41897 Sep 22 23:22:19.920 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }, state: ClientData([InProgress, New, New]) }, : downstairs
41898 Sep 22 23:22:19.920 INFO [0] client ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }
41899 Sep 22 23:22:19.920 INFO [1] received reconcile message
41900 Sep 22 23:22:19.920 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41901 Sep 22 23:22:19.920 INFO [1] client ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }
41902 Sep 22 23:22:19.920 INFO [2] received reconcile message
41903 Sep 22 23:22:19.920 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41904 Sep 22 23:22:19.920 INFO [2] client ExtentClose { repair_id: ReconciliationId(297), extent_id: 169 }
41905 Sep 22 23:22:19.920 DEBG 297 Close extent 169
41906 Sep 22 23:22:19.920 DEBG 297 Close extent 169
41907 Sep 22 23:22:19.921 DEBG 297 Close extent 169
41908 Sep 22 23:22:19.921 DEBG [2] It's time to notify for 297
41909 Sep 22 23:22:19.921 INFO Completion from [2] id:297 status:true
41910 Sep 22 23:22:19.921 INFO [298/752] Repair commands completed
41911 Sep 22 23:22:19.921 INFO Pop front: ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41912 Sep 22 23:22:19.921 INFO Sent repair work, now wait for resp
41913 Sep 22 23:22:19.921 INFO [0] received reconcile message
41914 Sep 22 23:22:19.921 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41915 Sep 22 23:22:19.921 INFO [0] client ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41916 Sep 22 23:22:19.921 INFO [0] Sending repair request ReconciliationId(298)
41917 Sep 22 23:22:19.921 INFO [1] received reconcile message
41918 Sep 22 23:22:19.921 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41919 Sep 22 23:22:19.921 INFO [1] client ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41920 Sep 22 23:22:19.921 INFO [1] No action required ReconciliationId(298)
41921 Sep 22 23:22:19.921 INFO [2] received reconcile message
41922 Sep 22 23:22:19.921 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41923 Sep 22 23:22:19.921 INFO [2] client ExtentRepair { repair_id: ReconciliationId(298), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
41924 Sep 22 23:22:19.921 INFO [2] No action required ReconciliationId(298)
41925 Sep 22 23:22:19.921 DEBG 298 Repair extent 169 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
41926 Sep 22 23:22:19.922 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A9.copy"
41927 Sep 22 23:22:19.964 ERRO [1] job id 1081 saw error GenericError("test error")
41928 Sep 22 23:22:19.964 DEBG up_ds_listen was notified
41929 Sep 22 23:22:19.964 DEBG up_ds_listen process 1080
41930 Sep 22 23:22:19.964 DEBG [A] ack job 1080:81, : downstairs
41931 Sep 22 23:22:19.965 DEBG up_ds_listen checked 1 jobs, back to waiting
41932 Sep 22 23:22:19.965 DEBG IO Flush 1082 has deps [JobId(1081), JobId(1080)]
41933 Sep 22 23:22:19.965 DEBG Flush :1080 extent_limit None deps:[JobId(1079)] res:true f:29 g:1
41934 Sep 22 23:22:19.971 DEBG Read :1081 deps:[JobId(1080)] res:true
41935 Sep 22 23:22:19.986 INFO accepted connection, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41936 Sep 22 23:22:19.986 TRCE incoming request, uri: /extent/169/files, method: GET, req_id: 02e0c845-3775-41fc-9e32-c55861482a62, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41937 Sep 22 23:22:19.986 INFO request completed, latency_us: 218, response_code: 200, uri: /extent/169/files, method: GET, req_id: 02e0c845-3775-41fc-9e32-c55861482a62, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41938 Sep 22 23:22:19.986 INFO eid:169 Found repair files: ["0A9", "0A9.db"]
41939 Sep 22 23:22:19.987 TRCE incoming request, uri: /newextent/169/data, method: GET, req_id: 823ca92e-6b90-4e96-b095-ae476b5bd6a8, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41940 Sep 22 23:22:19.987 INFO request completed, latency_us: 265, response_code: 200, uri: /newextent/169/data, method: GET, req_id: 823ca92e-6b90-4e96-b095-ae476b5bd6a8, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41941 Sep 22 23:22:19.992 TRCE incoming request, uri: /newextent/169/db, method: GET, req_id: 36d05e91-b798-45b9-bfaf-f37ed4e8c023, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41942 Sep 22 23:22:19.992 DEBG [rc] retire 1080 clears [JobId(1079), JobId(1080)], : downstairs
41943 Sep 22 23:22:19.992 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/169/db, method: GET, req_id: 36d05e91-b798-45b9-bfaf-f37ed4e8c023, remote_addr: 127.0.0.1:37931, local_addr: 127.0.0.1:52864, task: repair
41944 Sep 22 23:22:19.994 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A9.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A9.replace"
41945 Sep 22 23:22:19.994 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41946 Sep 22 23:22:19.994 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A9.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
41947 Sep 22 23:22:19.995 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A9"
41948 Sep 22 23:22:19.995 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A9.db"
41949 Sep 22 23:22:19.995 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41950 Sep 22 23:22:19.995 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A9.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A9.completed"
41951 Sep 22 23:22:19.995 WARN returning error on flush!
41952 Sep 22 23:22:19.995 DEBG Flush :1082 extent_limit None deps:[JobId(1081), JobId(1080)] res:false f:30 g:1
41953 Sep 22 23:22:19.995 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41954 Sep 22 23:22:19.995 DEBG Flush :1082 extent_limit None deps:[JobId(1081), JobId(1080)] res:true f:30 g:1
41955 Sep 22 23:22:19.995 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
41956 Sep 22 23:22:19.995 INFO [lossy] sleeping 1 second
41957 Sep 22 23:22:19.995 DEBG [0] It's time to notify for 298
41958 Sep 22 23:22:19.995 INFO Completion from [0] id:298 status:true
41959 Sep 22 23:22:19.995 INFO [299/752] Repair commands completed
41960 Sep 22 23:22:19.995 INFO Pop front: ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }, state: ClientData([New, New, New]) }
41961 Sep 22 23:22:19.995 INFO Sent repair work, now wait for resp
41962 Sep 22 23:22:19.995 INFO [0] received reconcile message
41963 Sep 22 23:22:19.995 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }, state: ClientData([InProgress, New, New]) }, : downstairs
41964 Sep 22 23:22:19.995 INFO [0] client ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }
41965 Sep 22 23:22:19.995 INFO [1] received reconcile message
41966 Sep 22 23:22:19.996 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41967 Sep 22 23:22:19.996 INFO [1] client ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }
41968 Sep 22 23:22:19.996 INFO [2] received reconcile message
41969 Sep 22 23:22:19.996 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41970 Sep 22 23:22:19.996 INFO [2] client ExtentReopen { repair_id: ReconciliationId(299), extent_id: 169 }
41971 Sep 22 23:22:19.996 DEBG 299 Reopen extent 169
41972 Sep 22 23:22:19.996 DEBG 299 Reopen extent 169
41973 Sep 22 23:22:19.997 DEBG 299 Reopen extent 169
41974 Sep 22 23:22:19.997 DEBG [2] It's time to notify for 299
41975 Sep 22 23:22:19.998 INFO Completion from [2] id:299 status:true
41976 Sep 22 23:22:19.998 INFO [300/752] Repair commands completed
41977 Sep 22 23:22:19.998 INFO Pop front: ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41978 Sep 22 23:22:19.998 INFO Sent repair work, now wait for resp
41979 Sep 22 23:22:19.998 INFO [0] received reconcile message
41980 Sep 22 23:22:19.998 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41981 Sep 22 23:22:19.998 INFO [0] client ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41982 Sep 22 23:22:19.998 INFO [1] received reconcile message
41983 Sep 22 23:22:19.998 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41984 Sep 22 23:22:19.998 INFO [1] client ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41985 Sep 22 23:22:19.998 INFO [2] received reconcile message
41986 Sep 22 23:22:19.998 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41987 Sep 22 23:22:19.998 INFO [2] client ExtentFlush { repair_id: ReconciliationId(300), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41988 Sep 22 23:22:19.998 DEBG 300 Flush extent 28 with f:2 g:2
41989 Sep 22 23:22:19.998 DEBG Flush just extent 28 with f:2 and g:2
41990 Sep 22 23:22:19.998 DEBG [1] It's time to notify for 300
41991 Sep 22 23:22:19.998 INFO Completion from [1] id:300 status:true
41992 Sep 22 23:22:19.998 INFO [301/752] Repair commands completed
41993 Sep 22 23:22:19.998 INFO Pop front: ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }, state: ClientData([New, New, New]) }
41994 Sep 22 23:22:19.998 INFO Sent repair work, now wait for resp
41995 Sep 22 23:22:19.998 INFO [0] received reconcile message
41996 Sep 22 23:22:19.998 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }, state: ClientData([InProgress, New, New]) }, : downstairs
41997 Sep 22 23:22:19.998 INFO [0] client ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }
41998 Sep 22 23:22:19.998 INFO [1] received reconcile message
41999 Sep 22 23:22:19.998 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42000 Sep 22 23:22:19.998 INFO [1] client ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }
42001 Sep 22 23:22:19.998 INFO [2] received reconcile message
42002 Sep 22 23:22:19.998 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42003 Sep 22 23:22:19.998 INFO [2] client ExtentClose { repair_id: ReconciliationId(301), extent_id: 28 }
42004 Sep 22 23:22:19.999 DEBG 301 Close extent 28
42005 Sep 22 23:22:19.999 DEBG 301 Close extent 28
42006 Sep 22 23:22:19.999 DEBG 301 Close extent 28
42007 Sep 22 23:22:19.999 DEBG [2] It's time to notify for 301
42008 Sep 22 23:22:20.000 INFO Completion from [2] id:301 status:true
42009 Sep 22 23:22:20.000 INFO [302/752] Repair commands completed
42010 Sep 22 23:22:20.000 INFO Pop front: ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42011 Sep 22 23:22:20.000 INFO Sent repair work, now wait for resp
42012 Sep 22 23:22:20.000 INFO [0] received reconcile message
42013 Sep 22 23:22:20.000 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42014 Sep 22 23:22:20.000 INFO [0] client ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42015 Sep 22 23:22:20.000 INFO [0] Sending repair request ReconciliationId(302)
42016 Sep 22 23:22:20.000 INFO [1] received reconcile message
42017 Sep 22 23:22:20.000 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42018 Sep 22 23:22:20.000 INFO [1] client ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42019 Sep 22 23:22:20.000 INFO [1] No action required ReconciliationId(302)
42020 Sep 22 23:22:20.000 INFO [2] received reconcile message
42021 Sep 22 23:22:20.000 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42022 Sep 22 23:22:20.000 INFO [2] client ExtentRepair { repair_id: ReconciliationId(302), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42023 Sep 22 23:22:20.000 INFO [2] No action required ReconciliationId(302)
42024 Sep 22 23:22:20.000 DEBG 302 Repair extent 28 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42025 Sep 22 23:22:20.000 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/01C.copy"
42026 Sep 22 23:22:20.066 INFO accepted connection, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42027 Sep 22 23:22:20.067 TRCE incoming request, uri: /extent/28/files, method: GET, req_id: fd5c7a80-db50-4bae-bb66-709a50ef7f23, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42028 Sep 22 23:22:20.067 INFO request completed, latency_us: 266, response_code: 200, uri: /extent/28/files, method: GET, req_id: fd5c7a80-db50-4bae-bb66-709a50ef7f23, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42029 Sep 22 23:22:20.067 INFO eid:28 Found repair files: ["01C", "01C.db"]
42030 Sep 22 23:22:20.068 TRCE incoming request, uri: /newextent/28/data, method: GET, req_id: 234fbddd-f924-41f2-abc3-901b792ba330, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42031 Sep 22 23:22:20.068 INFO request completed, latency_us: 344, response_code: 200, uri: /newextent/28/data, method: GET, req_id: 234fbddd-f924-41f2-abc3-901b792ba330, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42032 Sep 22 23:22:20.073 TRCE incoming request, uri: /newextent/28/db, method: GET, req_id: 14f6e5e4-e941-4f1c-890b-a73e984ddb46, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42033 Sep 22 23:22:20.073 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/28/db, method: GET, req_id: 14f6e5e4-e941-4f1c-890b-a73e984ddb46, remote_addr: 127.0.0.1:58112, local_addr: 127.0.0.1:52864, task: repair
42034 Sep 22 23:22:20.074 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/01C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/01C.replace"
42035 Sep 22 23:22:20.074 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42036 Sep 22 23:22:20.075 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/01C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42037 Sep 22 23:22:20.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01C"
42038 Sep 22 23:22:20.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01C.db"
42039 Sep 22 23:22:20.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42040 Sep 22 23:22:20.075 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/01C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/01C.completed"
42041 Sep 22 23:22:20.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42042 Sep 22 23:22:20.075 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42043 Sep 22 23:22:20.076 DEBG [0] It's time to notify for 302
42044 Sep 22 23:22:20.076 INFO Completion from [0] id:302 status:true
42045 Sep 22 23:22:20.076 INFO [303/752] Repair commands completed
42046 Sep 22 23:22:20.076 INFO Pop front: ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }, state: ClientData([New, New, New]) }
42047 Sep 22 23:22:20.076 INFO Sent repair work, now wait for resp
42048 Sep 22 23:22:20.076 INFO [0] received reconcile message
42049 Sep 22 23:22:20.076 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }, state: ClientData([InProgress, New, New]) }, : downstairs
42050 Sep 22 23:22:20.076 INFO [0] client ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }
42051 Sep 22 23:22:20.076 INFO [1] received reconcile message
42052 Sep 22 23:22:20.076 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42053 Sep 22 23:22:20.076 INFO [1] client ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }
42054 Sep 22 23:22:20.076 INFO [2] received reconcile message
42055 Sep 22 23:22:20.076 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42056 Sep 22 23:22:20.076 INFO [2] client ExtentReopen { repair_id: ReconciliationId(303), extent_id: 28 }
42057 Sep 22 23:22:20.076 DEBG 303 Reopen extent 28
42058 Sep 22 23:22:20.077 DEBG 303 Reopen extent 28
42059 Sep 22 23:22:20.077 DEBG 303 Reopen extent 28
42060 Sep 22 23:22:20.078 DEBG [2] It's time to notify for 303
42061 Sep 22 23:22:20.078 INFO Completion from [2] id:303 status:true
42062 Sep 22 23:22:20.078 INFO [304/752] Repair commands completed
42063 Sep 22 23:22:20.078 INFO Pop front: ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42064 Sep 22 23:22:20.078 INFO Sent repair work, now wait for resp
42065 Sep 22 23:22:20.078 INFO [0] received reconcile message
42066 Sep 22 23:22:20.078 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42067 Sep 22 23:22:20.078 INFO [0] client ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42068 Sep 22 23:22:20.078 INFO [1] received reconcile message
42069 Sep 22 23:22:20.078 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42070 Sep 22 23:22:20.078 INFO [1] client ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42071 Sep 22 23:22:20.078 INFO [2] received reconcile message
42072 Sep 22 23:22:20.078 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42073 Sep 22 23:22:20.078 INFO [2] client ExtentFlush { repair_id: ReconciliationId(304), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42074 Sep 22 23:22:20.079 DEBG 304 Flush extent 49 with f:2 g:2
42075 Sep 22 23:22:20.079 DEBG Flush just extent 49 with f:2 and g:2
42076 Sep 22 23:22:20.079 DEBG [1] It's time to notify for 304
42077 Sep 22 23:22:20.079 INFO Completion from [1] id:304 status:true
42078 Sep 22 23:22:20.079 INFO [305/752] Repair commands completed
42079 Sep 22 23:22:20.079 INFO Pop front: ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }, state: ClientData([New, New, New]) }
42080 Sep 22 23:22:20.079 INFO Sent repair work, now wait for resp
42081 Sep 22 23:22:20.079 INFO [0] received reconcile message
42082 Sep 22 23:22:20.079 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }, state: ClientData([InProgress, New, New]) }, : downstairs
42083 Sep 22 23:22:20.079 INFO [0] client ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }
42084 Sep 22 23:22:20.079 INFO [1] received reconcile message
42085 Sep 22 23:22:20.079 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42086 Sep 22 23:22:20.079 INFO [1] client ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }
42087 Sep 22 23:22:20.079 INFO [2] received reconcile message
42088 Sep 22 23:22:20.079 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42089 Sep 22 23:22:20.079 INFO [2] client ExtentClose { repair_id: ReconciliationId(305), extent_id: 49 }
42090 Sep 22 23:22:20.079 DEBG 305 Close extent 49
42091 Sep 22 23:22:20.079 DEBG 305 Close extent 49
42092 Sep 22 23:22:20.080 DEBG 305 Close extent 49
42093 Sep 22 23:22:20.080 DEBG [2] It's time to notify for 305
42094 Sep 22 23:22:20.080 INFO Completion from [2] id:305 status:true
42095 Sep 22 23:22:20.080 INFO [306/752] Repair commands completed
42096 Sep 22 23:22:20.080 INFO Pop front: ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42097 Sep 22 23:22:20.080 INFO Sent repair work, now wait for resp
42098 Sep 22 23:22:20.080 INFO [0] received reconcile message
42099 Sep 22 23:22:20.080 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42100 Sep 22 23:22:20.080 INFO [0] client ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42101 Sep 22 23:22:20.080 INFO [0] Sending repair request ReconciliationId(306)
42102 Sep 22 23:22:20.080 INFO [1] received reconcile message
42103 Sep 22 23:22:20.080 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42104 Sep 22 23:22:20.080 INFO [1] client ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42105 Sep 22 23:22:20.080 INFO [1] No action required ReconciliationId(306)
42106 Sep 22 23:22:20.080 INFO [2] received reconcile message
42107 Sep 22 23:22:20.080 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42108 Sep 22 23:22:20.080 INFO [2] client ExtentRepair { repair_id: ReconciliationId(306), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42109 Sep 22 23:22:20.081 INFO [2] No action required ReconciliationId(306)
42110 Sep 22 23:22:20.081 DEBG 306 Repair extent 49 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42111 Sep 22 23:22:20.081 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/031.copy"
42112 Sep 22 23:22:20.145 INFO accepted connection, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42113 Sep 22 23:22:20.146 TRCE incoming request, uri: /extent/49/files, method: GET, req_id: 31305576-2012-450d-9de0-fad4a24c3c0a, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42114 Sep 22 23:22:20.146 INFO request completed, latency_us: 305, response_code: 200, uri: /extent/49/files, method: GET, req_id: 31305576-2012-450d-9de0-fad4a24c3c0a, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42115 Sep 22 23:22:20.146 INFO eid:49 Found repair files: ["031", "031.db"]
42116 Sep 22 23:22:20.147 TRCE incoming request, uri: /newextent/49/data, method: GET, req_id: 7d0b649d-37b4-44f0-87b8-6af1cec1a299, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42117 Sep 22 23:22:20.147 INFO request completed, latency_us: 345, response_code: 200, uri: /newextent/49/data, method: GET, req_id: 7d0b649d-37b4-44f0-87b8-6af1cec1a299, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42118 Sep 22 23:22:20.152 TRCE incoming request, uri: /newextent/49/db, method: GET, req_id: 4c85e619-2c09-4f99-aeca-6523e1d273d4, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42119 Sep 22 23:22:20.152 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/49/db, method: GET, req_id: 4c85e619-2c09-4f99-aeca-6523e1d273d4, remote_addr: 127.0.0.1:43274, local_addr: 127.0.0.1:52864, task: repair
42120 Sep 22 23:22:20.153 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/031.copy" to "/tmp/downstairs-zrMnlo6G/00/000/031.replace"
42121 Sep 22 23:22:20.153 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42122 Sep 22 23:22:20.154 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/031.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42123 Sep 22 23:22:20.155 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/031"
42124 Sep 22 23:22:20.155 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/031.db"
42125 Sep 22 23:22:20.155 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42126 Sep 22 23:22:20.155 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/031.replace" to "/tmp/downstairs-zrMnlo6G/00/000/031.completed"
42127 Sep 22 23:22:20.155 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42128 Sep 22 23:22:20.155 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42129 Sep 22 23:22:20.155 DEBG [0] It's time to notify for 306
42130 Sep 22 23:22:20.155 INFO Completion from [0] id:306 status:true
42131 Sep 22 23:22:20.155 INFO [307/752] Repair commands completed
42132 Sep 22 23:22:20.155 INFO Pop front: ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }, state: ClientData([New, New, New]) }
42133 Sep 22 23:22:20.155 INFO Sent repair work, now wait for resp
42134 Sep 22 23:22:20.155 INFO [0] received reconcile message
42135 Sep 22 23:22:20.155 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }, state: ClientData([InProgress, New, New]) }, : downstairs
42136 Sep 22 23:22:20.155 INFO [0] client ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }
42137 Sep 22 23:22:20.155 INFO [1] received reconcile message
42138 Sep 22 23:22:20.155 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42139 Sep 22 23:22:20.155 INFO [1] client ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }
42140 Sep 22 23:22:20.155 INFO [2] received reconcile message
42141 Sep 22 23:22:20.155 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42142 Sep 22 23:22:20.155 INFO [2] client ExtentReopen { repair_id: ReconciliationId(307), extent_id: 49 }
42143 Sep 22 23:22:20.156 DEBG 307 Reopen extent 49
42144 Sep 22 23:22:20.156 DEBG 307 Reopen extent 49
42145 Sep 22 23:22:20.157 DEBG 307 Reopen extent 49
42146 Sep 22 23:22:20.157 DEBG [2] It's time to notify for 307
42147 Sep 22 23:22:20.157 INFO Completion from [2] id:307 status:true
42148 Sep 22 23:22:20.157 INFO [308/752] Repair commands completed
42149 Sep 22 23:22:20.157 INFO Pop front: ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42150 Sep 22 23:22:20.157 INFO Sent repair work, now wait for resp
42151 Sep 22 23:22:20.157 INFO [0] received reconcile message
42152 Sep 22 23:22:20.157 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42153 Sep 22 23:22:20.157 INFO [0] client ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42154 Sep 22 23:22:20.158 INFO [1] received reconcile message
42155 Sep 22 23:22:20.158 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42156 Sep 22 23:22:20.158 INFO [1] client ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42157 Sep 22 23:22:20.158 INFO [2] received reconcile message
42158 Sep 22 23:22:20.158 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42159 Sep 22 23:22:20.158 INFO [2] client ExtentFlush { repair_id: ReconciliationId(308), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42160 Sep 22 23:22:20.158 DEBG 308 Flush extent 15 with f:2 g:2
42161 Sep 22 23:22:20.158 DEBG Flush just extent 15 with f:2 and g:2
42162 Sep 22 23:22:20.158 DEBG [1] It's time to notify for 308
42163 Sep 22 23:22:20.158 INFO Completion from [1] id:308 status:true
42164 Sep 22 23:22:20.158 INFO [309/752] Repair commands completed
42165 Sep 22 23:22:20.158 INFO Pop front: ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }, state: ClientData([New, New, New]) }
42166 Sep 22 23:22:20.158 INFO Sent repair work, now wait for resp
42167 Sep 22 23:22:20.158 INFO [0] received reconcile message
42168 Sep 22 23:22:20.158 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }, state: ClientData([InProgress, New, New]) }, : downstairs
42169 Sep 22 23:22:20.158 INFO [0] client ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }
42170 Sep 22 23:22:20.158 INFO [1] received reconcile message
42171 Sep 22 23:22:20.158 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42172 Sep 22 23:22:20.158 INFO [1] client ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }
42173 Sep 22 23:22:20.158 INFO [2] received reconcile message
42174 Sep 22 23:22:20.158 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42175 Sep 22 23:22:20.158 INFO [2] client ExtentClose { repair_id: ReconciliationId(309), extent_id: 15 }
42176 Sep 22 23:22:20.158 DEBG 309 Close extent 15
42177 Sep 22 23:22:20.159 DEBG 309 Close extent 15
42178 Sep 22 23:22:20.159 DEBG 309 Close extent 15
42179 Sep 22 23:22:20.159 DEBG [2] It's time to notify for 309
42180 Sep 22 23:22:20.159 INFO Completion from [2] id:309 status:true
42181 Sep 22 23:22:20.159 INFO [310/752] Repair commands completed
42182 Sep 22 23:22:20.159 INFO Pop front: ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42183 Sep 22 23:22:20.159 INFO Sent repair work, now wait for resp
42184 Sep 22 23:22:20.159 INFO [0] received reconcile message
42185 Sep 22 23:22:20.159 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42186 Sep 22 23:22:20.159 INFO [0] client ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42187 Sep 22 23:22:20.159 INFO [0] Sending repair request ReconciliationId(310)
42188 Sep 22 23:22:20.160 INFO [1] received reconcile message
42189 Sep 22 23:22:20.160 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42190 Sep 22 23:22:20.160 INFO [1] client ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42191 Sep 22 23:22:20.160 INFO [1] No action required ReconciliationId(310)
42192 Sep 22 23:22:20.160 INFO [2] received reconcile message
42193 Sep 22 23:22:20.160 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42194 Sep 22 23:22:20.160 INFO [2] client ExtentRepair { repair_id: ReconciliationId(310), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42195 Sep 22 23:22:20.160 INFO [2] No action required ReconciliationId(310)
42196 Sep 22 23:22:20.160 DEBG 310 Repair extent 15 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42197 Sep 22 23:22:20.160 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/00F.copy"
42198 Sep 22 23:22:20.225 INFO accepted connection, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42199 Sep 22 23:22:20.225 TRCE incoming request, uri: /extent/15/files, method: GET, req_id: 801a47ed-e4f0-481a-8572-f0c327ff4d0d, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42200 Sep 22 23:22:20.225 INFO request completed, latency_us: 215, response_code: 200, uri: /extent/15/files, method: GET, req_id: 801a47ed-e4f0-481a-8572-f0c327ff4d0d, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42201 Sep 22 23:22:20.226 INFO eid:15 Found repair files: ["00F", "00F.db"]
42202 Sep 22 23:22:20.226 TRCE incoming request, uri: /newextent/15/data, method: GET, req_id: 76fbd9bd-49a8-4894-8c72-f1f95c2417b7, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42203 Sep 22 23:22:20.226 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/15/data, method: GET, req_id: 76fbd9bd-49a8-4894-8c72-f1f95c2417b7, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42204 Sep 22 23:22:20.231 TRCE incoming request, uri: /newextent/15/db, method: GET, req_id: 8ac1cadf-680f-4a97-bac5-008e2de460aa, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42205 Sep 22 23:22:20.232 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/15/db, method: GET, req_id: 8ac1cadf-680f-4a97-bac5-008e2de460aa, remote_addr: 127.0.0.1:60684, local_addr: 127.0.0.1:52864, task: repair
42206 Sep 22 23:22:20.233 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/00F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/00F.replace"
42207 Sep 22 23:22:20.233 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42208 Sep 22 23:22:20.233 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/00F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42209 Sep 22 23:22:20.234 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00F"
42210 Sep 22 23:22:20.234 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00F.db"
42211 Sep 22 23:22:20.234 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42212 Sep 22 23:22:20.234 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/00F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/00F.completed"
42213 Sep 22 23:22:20.234 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42214 Sep 22 23:22:20.234 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42215 Sep 22 23:22:20.234 DEBG [0] It's time to notify for 310
42216 Sep 22 23:22:20.234 INFO Completion from [0] id:310 status:true
42217 Sep 22 23:22:20.234 INFO [311/752] Repair commands completed
42218 Sep 22 23:22:20.234 INFO Pop front: ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }, state: ClientData([New, New, New]) }
42219 Sep 22 23:22:20.234 INFO Sent repair work, now wait for resp
42220 Sep 22 23:22:20.234 INFO [0] received reconcile message
42221 Sep 22 23:22:20.234 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }, state: ClientData([InProgress, New, New]) }, : downstairs
42222 Sep 22 23:22:20.234 INFO [0] client ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }
42223 Sep 22 23:22:20.234 INFO [1] received reconcile message
42224 Sep 22 23:22:20.234 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42225 Sep 22 23:22:20.234 INFO [1] client ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }
42226 Sep 22 23:22:20.234 INFO [2] received reconcile message
42227 Sep 22 23:22:20.234 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42228 Sep 22 23:22:20.234 INFO [2] client ExtentReopen { repair_id: ReconciliationId(311), extent_id: 15 }
42229 Sep 22 23:22:20.235 DEBG 311 Reopen extent 15
42230 Sep 22 23:22:20.235 DEBG 311 Reopen extent 15
42231 Sep 22 23:22:20.236 DEBG 311 Reopen extent 15
42232 Sep 22 23:22:20.236 DEBG [2] It's time to notify for 311
42233 Sep 22 23:22:20.236 INFO Completion from [2] id:311 status:true
42234 Sep 22 23:22:20.236 INFO [312/752] Repair commands completed
42235 Sep 22 23:22:20.236 INFO Pop front: ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42236 Sep 22 23:22:20.236 INFO Sent repair work, now wait for resp
42237 Sep 22 23:22:20.236 INFO [0] received reconcile message
42238 Sep 22 23:22:20.236 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42239 Sep 22 23:22:20.236 INFO [0] client ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42240 Sep 22 23:22:20.236 INFO [1] received reconcile message
42241 Sep 22 23:22:20.237 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42242 Sep 22 23:22:20.237 INFO [1] client ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42243 Sep 22 23:22:20.237 INFO [2] received reconcile message
42244 Sep 22 23:22:20.237 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42245 Sep 22 23:22:20.237 INFO [2] client ExtentFlush { repair_id: ReconciliationId(312), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42246 Sep 22 23:22:20.237 DEBG 312 Flush extent 43 with f:2 g:2
42247 Sep 22 23:22:20.237 DEBG Flush just extent 43 with f:2 and g:2
42248 Sep 22 23:22:20.237 DEBG [1] It's time to notify for 312
42249 Sep 22 23:22:20.237 INFO Completion from [1] id:312 status:true
42250 Sep 22 23:22:20.237 INFO [313/752] Repair commands completed
42251 Sep 22 23:22:20.237 INFO Pop front: ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }, state: ClientData([New, New, New]) }
42252 Sep 22 23:22:20.237 INFO Sent repair work, now wait for resp
42253 Sep 22 23:22:20.237 INFO [0] received reconcile message
42254 Sep 22 23:22:20.237 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }, state: ClientData([InProgress, New, New]) }, : downstairs
42255 Sep 22 23:22:20.237 INFO [0] client ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }
42256 Sep 22 23:22:20.237 INFO [1] received reconcile message
42257 Sep 22 23:22:20.237 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42258 Sep 22 23:22:20.237 INFO [1] client ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }
42259 Sep 22 23:22:20.237 INFO [2] received reconcile message
42260 Sep 22 23:22:20.237 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42261 Sep 22 23:22:20.237 INFO [2] client ExtentClose { repair_id: ReconciliationId(313), extent_id: 43 }
42262 Sep 22 23:22:20.237 DEBG 313 Close extent 43
42263 Sep 22 23:22:20.238 DEBG 313 Close extent 43
42264 Sep 22 23:22:20.238 DEBG 313 Close extent 43
42265 Sep 22 23:22:20.238 DEBG [2] It's time to notify for 313
42266 Sep 22 23:22:20.238 INFO Completion from [2] id:313 status:true
42267 Sep 22 23:22:20.238 INFO [314/752] Repair commands completed
42268 Sep 22 23:22:20.238 INFO Pop front: ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42269 Sep 22 23:22:20.238 INFO Sent repair work, now wait for resp
42270 Sep 22 23:22:20.238 INFO [0] received reconcile message
42271 Sep 22 23:22:20.238 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42272 Sep 22 23:22:20.238 INFO [0] client ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42273 Sep 22 23:22:20.238 INFO [0] Sending repair request ReconciliationId(314)
42274 Sep 22 23:22:20.238 INFO [1] received reconcile message
42275 Sep 22 23:22:20.239 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42276 Sep 22 23:22:20.239 INFO [1] client ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42277 Sep 22 23:22:20.239 INFO [1] No action required ReconciliationId(314)
42278 Sep 22 23:22:20.239 INFO [2] received reconcile message
42279 Sep 22 23:22:20.239 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42280 Sep 22 23:22:20.239 INFO [2] client ExtentRepair { repair_id: ReconciliationId(314), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42281 Sep 22 23:22:20.239 INFO [2] No action required ReconciliationId(314)
42282 Sep 22 23:22:20.239 DEBG 314 Repair extent 43 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42283 Sep 22 23:22:20.239 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/02B.copy"
42284 Sep 22 23:22:20.302 INFO accepted connection, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42285 Sep 22 23:22:20.302 TRCE incoming request, uri: /extent/43/files, method: GET, req_id: 3b91bffe-4b28-48d9-abe6-9c8f5c4032d8, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42286 Sep 22 23:22:20.302 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/43/files, method: GET, req_id: 3b91bffe-4b28-48d9-abe6-9c8f5c4032d8, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42287 Sep 22 23:22:20.302 INFO eid:43 Found repair files: ["02B", "02B.db"]
42288 Sep 22 23:22:20.302 TRCE incoming request, uri: /newextent/43/data, method: GET, req_id: d9f00ab3-0a1d-4684-954e-f87930771d5d, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42289 Sep 22 23:22:20.303 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/43/data, method: GET, req_id: d9f00ab3-0a1d-4684-954e-f87930771d5d, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42290 Sep 22 23:22:20.308 TRCE incoming request, uri: /newextent/43/db, method: GET, req_id: 61d2aecb-9186-4e54-bc26-461522dee8de, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42291 Sep 22 23:22:20.308 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/43/db, method: GET, req_id: 61d2aecb-9186-4e54-bc26-461522dee8de, remote_addr: 127.0.0.1:42156, local_addr: 127.0.0.1:52864, task: repair
42292 Sep 22 23:22:20.309 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/02B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/02B.replace"
42293 Sep 22 23:22:20.309 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42294 Sep 22 23:22:20.310 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/02B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42295 Sep 22 23:22:20.310 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02B"
42296 Sep 22 23:22:20.310 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02B.db"
42297 Sep 22 23:22:20.310 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42298 Sep 22 23:22:20.310 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/02B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/02B.completed"
42299 Sep 22 23:22:20.310 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42300 Sep 22 23:22:20.310 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42301 Sep 22 23:22:20.311 DEBG [0] It's time to notify for 314
42302 Sep 22 23:22:20.311 INFO Completion from [0] id:314 status:true
42303 Sep 22 23:22:20.311 INFO [315/752] Repair commands completed
42304 Sep 22 23:22:20.311 INFO Pop front: ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }, state: ClientData([New, New, New]) }
42305 Sep 22 23:22:20.311 INFO Sent repair work, now wait for resp
42306 Sep 22 23:22:20.311 INFO [0] received reconcile message
42307 Sep 22 23:22:20.311 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }, state: ClientData([InProgress, New, New]) }, : downstairs
42308 Sep 22 23:22:20.311 INFO [0] client ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }
42309 Sep 22 23:22:20.311 INFO [1] received reconcile message
42310 Sep 22 23:22:20.311 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42311 Sep 22 23:22:20.311 INFO [1] client ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }
42312 Sep 22 23:22:20.311 INFO [2] received reconcile message
42313 Sep 22 23:22:20.311 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42314 Sep 22 23:22:20.311 INFO [2] client ExtentReopen { repair_id: ReconciliationId(315), extent_id: 43 }
42315 Sep 22 23:22:20.311 DEBG 315 Reopen extent 43
42316 Sep 22 23:22:20.312 DEBG 315 Reopen extent 43
42317 Sep 22 23:22:20.312 DEBG 315 Reopen extent 43
42318 Sep 22 23:22:20.313 DEBG [2] It's time to notify for 315
42319 Sep 22 23:22:20.313 INFO Completion from [2] id:315 status:true
42320 Sep 22 23:22:20.313 INFO [316/752] Repair commands completed
42321 Sep 22 23:22:20.313 INFO Pop front: ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42322 Sep 22 23:22:20.313 INFO Sent repair work, now wait for resp
42323 Sep 22 23:22:20.313 INFO [0] received reconcile message
42324 Sep 22 23:22:20.313 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42325 Sep 22 23:22:20.313 INFO [0] client ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42326 Sep 22 23:22:20.313 INFO [1] received reconcile message
42327 Sep 22 23:22:20.313 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42328 Sep 22 23:22:20.313 INFO [1] client ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42329 Sep 22 23:22:20.313 INFO [2] received reconcile message
42330 Sep 22 23:22:20.313 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42331 Sep 22 23:22:20.313 INFO [2] client ExtentFlush { repair_id: ReconciliationId(316), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42332 Sep 22 23:22:20.313 DEBG 316 Flush extent 60 with f:2 g:2
42333 Sep 22 23:22:20.313 DEBG Flush just extent 60 with f:2 and g:2
42334 Sep 22 23:22:20.314 DEBG [1] It's time to notify for 316
42335 Sep 22 23:22:20.314 INFO Completion from [1] id:316 status:true
42336 Sep 22 23:22:20.314 INFO [317/752] Repair commands completed
42337 Sep 22 23:22:20.314 INFO Pop front: ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }, state: ClientData([New, New, New]) }
42338 Sep 22 23:22:20.314 INFO Sent repair work, now wait for resp
42339 Sep 22 23:22:20.314 INFO [0] received reconcile message
42340 Sep 22 23:22:20.314 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }, state: ClientData([InProgress, New, New]) }, : downstairs
42341 Sep 22 23:22:20.314 INFO [0] client ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }
42342 Sep 22 23:22:20.314 INFO [1] received reconcile message
42343 Sep 22 23:22:20.314 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42344 Sep 22 23:22:20.314 INFO [1] client ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }
42345 Sep 22 23:22:20.314 INFO [2] received reconcile message
42346 Sep 22 23:22:20.314 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42347 Sep 22 23:22:20.314 INFO [2] client ExtentClose { repair_id: ReconciliationId(317), extent_id: 60 }
42348 Sep 22 23:22:20.314 DEBG 317 Close extent 60
42349 Sep 22 23:22:20.314 DEBG 317 Close extent 60
42350 Sep 22 23:22:20.315 DEBG 317 Close extent 60
42351 Sep 22 23:22:20.315 DEBG [2] It's time to notify for 317
42352 Sep 22 23:22:20.315 INFO Completion from [2] id:317 status:true
42353 Sep 22 23:22:20.315 INFO [318/752] Repair commands completed
42354 Sep 22 23:22:20.315 INFO Pop front: ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42355 Sep 22 23:22:20.315 INFO Sent repair work, now wait for resp
42356 Sep 22 23:22:20.315 INFO [0] received reconcile message
42357 Sep 22 23:22:20.315 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42358 Sep 22 23:22:20.315 INFO [0] client ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42359 Sep 22 23:22:20.315 INFO [0] Sending repair request ReconciliationId(318)
42360 Sep 22 23:22:20.315 INFO [1] received reconcile message
42361 Sep 22 23:22:20.315 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42362 Sep 22 23:22:20.315 INFO [1] client ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42363 Sep 22 23:22:20.315 INFO [1] No action required ReconciliationId(318)
42364 Sep 22 23:22:20.315 INFO [2] received reconcile message
42365 Sep 22 23:22:20.315 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42366 Sep 22 23:22:20.315 INFO [2] client ExtentRepair { repair_id: ReconciliationId(318), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42367 Sep 22 23:22:20.315 INFO [2] No action required ReconciliationId(318)
42368 Sep 22 23:22:20.316 DEBG 318 Repair extent 60 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42369 Sep 22 23:22:20.316 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/03C.copy"
42370 Sep 22 23:22:20.373 DEBG [1] Read AckReady 1081, : downstairs
42371 Sep 22 23:22:20.374 ERRO [1] job id 1082 saw error GenericError("test error")
42372 Sep 22 23:22:20.374 DEBG up_ds_listen was notified
42373 Sep 22 23:22:20.374 DEBG up_ds_listen process 1081
42374 Sep 22 23:22:20.374 DEBG [A] ack job 1081:82, : downstairs
42375 Sep 22 23:22:20.380 INFO accepted connection, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42376 Sep 22 23:22:20.380 TRCE incoming request, uri: /extent/60/files, method: GET, req_id: 99aaab7a-9b9a-45eb-903b-530a660f81d4, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42377 Sep 22 23:22:20.380 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/60/files, method: GET, req_id: 99aaab7a-9b9a-45eb-903b-530a660f81d4, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42378 Sep 22 23:22:20.381 INFO eid:60 Found repair files: ["03C", "03C.db"]
42379 Sep 22 23:22:20.381 TRCE incoming request, uri: /newextent/60/data, method: GET, req_id: 9c1fd412-a54e-47aa-8406-8564f97261eb, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42380 Sep 22 23:22:20.381 INFO request completed, latency_us: 256, response_code: 200, uri: /newextent/60/data, method: GET, req_id: 9c1fd412-a54e-47aa-8406-8564f97261eb, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42381 Sep 22 23:22:20.386 TRCE incoming request, uri: /newextent/60/db, method: GET, req_id: b3702ba4-2649-4f51-9f6c-891ac0714979, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42382 Sep 22 23:22:20.386 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/60/db, method: GET, req_id: b3702ba4-2649-4f51-9f6c-891ac0714979, remote_addr: 127.0.0.1:34218, local_addr: 127.0.0.1:52864, task: repair
42383 Sep 22 23:22:20.387 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/03C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/03C.replace"
42384 Sep 22 23:22:20.388 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42385 Sep 22 23:22:20.388 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/03C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42386 Sep 22 23:22:20.389 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03C"
42387 Sep 22 23:22:20.389 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03C.db"
42388 Sep 22 23:22:20.389 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42389 Sep 22 23:22:20.389 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/03C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/03C.completed"
42390 Sep 22 23:22:20.389 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42391 Sep 22 23:22:20.389 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42392 Sep 22 23:22:20.389 DEBG [0] It's time to notify for 318
42393 Sep 22 23:22:20.389 INFO Completion from [0] id:318 status:true
42394 Sep 22 23:22:20.389 INFO [319/752] Repair commands completed
42395 Sep 22 23:22:20.389 INFO Pop front: ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }, state: ClientData([New, New, New]) }
42396 Sep 22 23:22:20.389 INFO Sent repair work, now wait for resp
42397 Sep 22 23:22:20.389 INFO [0] received reconcile message
42398 Sep 22 23:22:20.389 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }, state: ClientData([InProgress, New, New]) }, : downstairs
42399 Sep 22 23:22:20.389 INFO [0] client ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }
42400 Sep 22 23:22:20.389 INFO [1] received reconcile message
42401 Sep 22 23:22:20.389 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42402 Sep 22 23:22:20.389 INFO [1] client ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }
42403 Sep 22 23:22:20.389 INFO [2] received reconcile message
42404 Sep 22 23:22:20.389 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42405 Sep 22 23:22:20.389 INFO [2] client ExtentReopen { repair_id: ReconciliationId(319), extent_id: 60 }
42406 Sep 22 23:22:20.390 DEBG 319 Reopen extent 60
42407 Sep 22 23:22:20.390 DEBG 319 Reopen extent 60
42408 Sep 22 23:22:20.391 DEBG 319 Reopen extent 60
42409 Sep 22 23:22:20.391 DEBG [2] It's time to notify for 319
42410 Sep 22 23:22:20.391 INFO Completion from [2] id:319 status:true
42411 Sep 22 23:22:20.391 INFO [320/752] Repair commands completed
42412 Sep 22 23:22:20.391 INFO Pop front: ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42413 Sep 22 23:22:20.391 INFO Sent repair work, now wait for resp
42414 Sep 22 23:22:20.391 INFO [0] received reconcile message
42415 Sep 22 23:22:20.391 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42416 Sep 22 23:22:20.391 INFO [0] client ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42417 Sep 22 23:22:20.391 INFO [1] received reconcile message
42418 Sep 22 23:22:20.391 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42419 Sep 22 23:22:20.391 INFO [1] client ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42420 Sep 22 23:22:20.391 INFO [2] received reconcile message
42421 Sep 22 23:22:20.391 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42422 Sep 22 23:22:20.392 INFO [2] client ExtentFlush { repair_id: ReconciliationId(320), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42423 Sep 22 23:22:20.392 DEBG 320 Flush extent 18 with f:2 g:2
42424 Sep 22 23:22:20.392 DEBG Flush just extent 18 with f:2 and g:2
42425 Sep 22 23:22:20.392 DEBG [1] It's time to notify for 320
42426 Sep 22 23:22:20.392 INFO Completion from [1] id:320 status:true
42427 Sep 22 23:22:20.392 INFO [321/752] Repair commands completed
42428 Sep 22 23:22:20.392 INFO Pop front: ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }, state: ClientData([New, New, New]) }
42429 Sep 22 23:22:20.392 INFO Sent repair work, now wait for resp
42430 Sep 22 23:22:20.392 INFO [0] received reconcile message
42431 Sep 22 23:22:20.392 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }, state: ClientData([InProgress, New, New]) }, : downstairs
42432 Sep 22 23:22:20.392 INFO [0] client ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }
42433 Sep 22 23:22:20.392 INFO [1] received reconcile message
42434 Sep 22 23:22:20.392 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42435 Sep 22 23:22:20.392 INFO [1] client ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }
42436 Sep 22 23:22:20.392 INFO [2] received reconcile message
42437 Sep 22 23:22:20.392 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42438 Sep 22 23:22:20.392 INFO [2] client ExtentClose { repair_id: ReconciliationId(321), extent_id: 18 }
42439 Sep 22 23:22:20.392 DEBG 321 Close extent 18
42440 Sep 22 23:22:20.392 DEBG 321 Close extent 18
42441 Sep 22 23:22:20.393 DEBG 321 Close extent 18
42442 Sep 22 23:22:20.393 DEBG [2] It's time to notify for 321
42443 Sep 22 23:22:20.393 INFO Completion from [2] id:321 status:true
42444 Sep 22 23:22:20.393 INFO [322/752] Repair commands completed
42445 Sep 22 23:22:20.393 INFO Pop front: ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42446 Sep 22 23:22:20.393 INFO Sent repair work, now wait for resp
42447 Sep 22 23:22:20.393 INFO [0] received reconcile message
42448 Sep 22 23:22:20.393 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42449 Sep 22 23:22:20.393 INFO [0] client ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42450 Sep 22 23:22:20.393 INFO [0] Sending repair request ReconciliationId(322)
42451 Sep 22 23:22:20.393 INFO [1] received reconcile message
42452 Sep 22 23:22:20.393 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42453 Sep 22 23:22:20.393 INFO [1] client ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42454 Sep 22 23:22:20.394 INFO [1] No action required ReconciliationId(322)
42455 Sep 22 23:22:20.394 INFO [2] received reconcile message
42456 Sep 22 23:22:20.394 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42457 Sep 22 23:22:20.394 INFO [2] client ExtentRepair { repair_id: ReconciliationId(322), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42458 Sep 22 23:22:20.394 INFO [2] No action required ReconciliationId(322)
42459 Sep 22 23:22:20.394 DEBG 322 Repair extent 18 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42460 Sep 22 23:22:20.394 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/012.copy"
42461 Sep 22 23:22:20.427 DEBG up_ds_listen checked 1 jobs, back to waiting
42462 Sep 22 23:22:20.430 DEBG Flush :1082 extent_limit None deps:[JobId(1081), JobId(1080)] res:true f:30 g:1
42463 Sep 22 23:22:20.430 INFO [lossy] sleeping 1 second
42464 Sep 22 23:22:20.430 INFO [lossy] skipping 1081
42465 Sep 22 23:22:20.430 INFO [lossy] skipping 1082
42466 Sep 22 23:22:20.430 WARN returning error on read!
42467 Sep 22 23:22:20.430 DEBG Read :1081 deps:[JobId(1080)] res:false
42468 Sep 22 23:22:20.430 INFO [lossy] skipping 1082
42469 Sep 22 23:22:20.430 INFO [lossy] skipping 1081
42470 Sep 22 23:22:20.430 INFO [lossy] skipping 1082
42471 Sep 22 23:22:20.430 INFO [lossy] skipping 1081
42472 Sep 22 23:22:20.436 DEBG Read :1081 deps:[JobId(1080)] res:true
42473 Sep 22 23:22:20.459 INFO accepted connection, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42474 Sep 22 23:22:20.459 TRCE incoming request, uri: /extent/18/files, method: GET, req_id: 190edafc-ed12-473d-9d18-a7652c2a94ea, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42475 Sep 22 23:22:20.459 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/18/files, method: GET, req_id: 190edafc-ed12-473d-9d18-a7652c2a94ea, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42476 Sep 22 23:22:20.460 INFO eid:18 Found repair files: ["012", "012.db"]
42477 Sep 22 23:22:20.460 TRCE incoming request, uri: /newextent/18/data, method: GET, req_id: e9cb2297-7515-444d-b235-1d25c804d296, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42478 Sep 22 23:22:20.460 INFO request completed, latency_us: 349, response_code: 200, uri: /newextent/18/data, method: GET, req_id: e9cb2297-7515-444d-b235-1d25c804d296, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42479 Sep 22 23:22:20.465 TRCE incoming request, uri: /newextent/18/db, method: GET, req_id: d0505bf2-8c91-4f19-86e9-af7be9578d9c, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42480 Sep 22 23:22:20.466 INFO request completed, latency_us: 352, response_code: 200, uri: /newextent/18/db, method: GET, req_id: d0505bf2-8c91-4f19-86e9-af7be9578d9c, remote_addr: 127.0.0.1:44148, local_addr: 127.0.0.1:52864, task: repair
42481 Sep 22 23:22:20.467 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/012.copy" to "/tmp/downstairs-zrMnlo6G/00/000/012.replace"
42482 Sep 22 23:22:20.467 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42483 Sep 22 23:22:20.468 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/012.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42484 Sep 22 23:22:20.468 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/012"
42485 Sep 22 23:22:20.468 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/012.db"
42486 Sep 22 23:22:20.468 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42487 Sep 22 23:22:20.468 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/012.replace" to "/tmp/downstairs-zrMnlo6G/00/000/012.completed"
42488 Sep 22 23:22:20.468 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42489 Sep 22 23:22:20.468 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42490 Sep 22 23:22:20.469 DEBG [0] It's time to notify for 322
42491 Sep 22 23:22:20.469 INFO Completion from [0] id:322 status:true
42492 Sep 22 23:22:20.469 INFO [323/752] Repair commands completed
42493 Sep 22 23:22:20.469 INFO Pop front: ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }, state: ClientData([New, New, New]) }
42494 Sep 22 23:22:20.469 INFO Sent repair work, now wait for resp
42495 Sep 22 23:22:20.469 INFO [0] received reconcile message
42496 Sep 22 23:22:20.469 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }, state: ClientData([InProgress, New, New]) }, : downstairs
42497 Sep 22 23:22:20.469 INFO [0] client ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }
42498 Sep 22 23:22:20.469 INFO [1] received reconcile message
42499 Sep 22 23:22:20.469 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42500 Sep 22 23:22:20.469 INFO [1] client ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }
42501 Sep 22 23:22:20.469 INFO [2] received reconcile message
42502 Sep 22 23:22:20.469 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42503 Sep 22 23:22:20.469 INFO [2] client ExtentReopen { repair_id: ReconciliationId(323), extent_id: 18 }
42504 Sep 22 23:22:20.469 DEBG 323 Reopen extent 18
42505 Sep 22 23:22:20.470 DEBG 323 Reopen extent 18
42506 Sep 22 23:22:20.470 DEBG 323 Reopen extent 18
42507 Sep 22 23:22:20.471 DEBG [2] It's time to notify for 323
42508 Sep 22 23:22:20.471 INFO Completion from [2] id:323 status:true
42509 Sep 22 23:22:20.471 INFO [324/752] Repair commands completed
42510 Sep 22 23:22:20.471 INFO Pop front: ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42511 Sep 22 23:22:20.471 INFO Sent repair work, now wait for resp
42512 Sep 22 23:22:20.471 INFO [0] received reconcile message
42513 Sep 22 23:22:20.471 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42514 Sep 22 23:22:20.471 INFO [0] client ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42515 Sep 22 23:22:20.471 INFO [1] received reconcile message
42516 Sep 22 23:22:20.471 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42517 Sep 22 23:22:20.471 INFO [1] client ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42518 Sep 22 23:22:20.471 INFO [2] received reconcile message
42519 Sep 22 23:22:20.471 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42520 Sep 22 23:22:20.471 INFO [2] client ExtentFlush { repair_id: ReconciliationId(324), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42521 Sep 22 23:22:20.472 DEBG 324 Flush extent 110 with f:2 g:2
42522 Sep 22 23:22:20.472 DEBG Flush just extent 110 with f:2 and g:2
42523 Sep 22 23:22:20.472 DEBG [1] It's time to notify for 324
42524 Sep 22 23:22:20.472 INFO Completion from [1] id:324 status:true
42525 Sep 22 23:22:20.472 INFO [325/752] Repair commands completed
42526 Sep 22 23:22:20.472 INFO Pop front: ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }, state: ClientData([New, New, New]) }
42527 Sep 22 23:22:20.472 INFO Sent repair work, now wait for resp
42528 Sep 22 23:22:20.472 INFO [0] received reconcile message
42529 Sep 22 23:22:20.472 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }, state: ClientData([InProgress, New, New]) }, : downstairs
42530 Sep 22 23:22:20.472 INFO [0] client ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }
42531 Sep 22 23:22:20.472 INFO [1] received reconcile message
42532 Sep 22 23:22:20.472 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42533 Sep 22 23:22:20.472 INFO [1] client ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }
42534 Sep 22 23:22:20.472 INFO [2] received reconcile message
42535 Sep 22 23:22:20.472 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42536 Sep 22 23:22:20.472 INFO [2] client ExtentClose { repair_id: ReconciliationId(325), extent_id: 110 }
42537 Sep 22 23:22:20.472 DEBG 325 Close extent 110
42538 Sep 22 23:22:20.472 DEBG 325 Close extent 110
42539 Sep 22 23:22:20.473 DEBG 325 Close extent 110
42540 Sep 22 23:22:20.473 DEBG [2] It's time to notify for 325
42541 Sep 22 23:22:20.473 INFO Completion from [2] id:325 status:true
42542 Sep 22 23:22:20.473 INFO [326/752] Repair commands completed
42543 Sep 22 23:22:20.473 INFO Pop front: ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42544 Sep 22 23:22:20.473 INFO Sent repair work, now wait for resp
42545 Sep 22 23:22:20.473 INFO [0] received reconcile message
42546 Sep 22 23:22:20.473 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42547 Sep 22 23:22:20.473 INFO [0] client ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42548 Sep 22 23:22:20.473 INFO [0] Sending repair request ReconciliationId(326)
42549 Sep 22 23:22:20.473 INFO [1] received reconcile message
42550 Sep 22 23:22:20.473 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42551 Sep 22 23:22:20.473 INFO [1] client ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42552 Sep 22 23:22:20.473 INFO [1] No action required ReconciliationId(326)
42553 Sep 22 23:22:20.473 INFO [2] received reconcile message
42554 Sep 22 23:22:20.473 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42555 Sep 22 23:22:20.474 INFO [2] client ExtentRepair { repair_id: ReconciliationId(326), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42556 Sep 22 23:22:20.474 INFO [2] No action required ReconciliationId(326)
42557 Sep 22 23:22:20.474 DEBG 326 Repair extent 110 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42558 Sep 22 23:22:20.474 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/06E.copy"
42559 Sep 22 23:22:20.506 DEBG IO Read 1083 has deps [JobId(1082)]
42560 Sep 22 23:22:20.538 INFO accepted connection, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42561 Sep 22 23:22:20.538 TRCE incoming request, uri: /extent/110/files, method: GET, req_id: 4231e696-e5ee-4677-ab8e-0e04ca1cbe20, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42562 Sep 22 23:22:20.539 INFO request completed, latency_us: 204, response_code: 200, uri: /extent/110/files, method: GET, req_id: 4231e696-e5ee-4677-ab8e-0e04ca1cbe20, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42563 Sep 22 23:22:20.539 INFO eid:110 Found repair files: ["06E", "06E.db"]
42564 Sep 22 23:22:20.539 TRCE incoming request, uri: /newextent/110/data, method: GET, req_id: fca3749f-3f15-4d66-bc9d-5c29cf8611b7, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42565 Sep 22 23:22:20.540 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/110/data, method: GET, req_id: fca3749f-3f15-4d66-bc9d-5c29cf8611b7, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42566 Sep 22 23:22:20.544 TRCE incoming request, uri: /newextent/110/db, method: GET, req_id: ebd033e8-311b-4eed-a5d1-b9083be6ce8a, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42567 Sep 22 23:22:20.544 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/110/db, method: GET, req_id: ebd033e8-311b-4eed-a5d1-b9083be6ce8a, remote_addr: 127.0.0.1:58718, local_addr: 127.0.0.1:52864, task: repair
42568 Sep 22 23:22:20.546 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/06E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/06E.replace"
42569 Sep 22 23:22:20.546 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42570 Sep 22 23:22:20.546 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/06E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42571 Sep 22 23:22:20.547 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06E"
42572 Sep 22 23:22:20.547 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06E.db"
42573 Sep 22 23:22:20.547 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42574 Sep 22 23:22:20.547 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/06E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/06E.completed"
42575 Sep 22 23:22:20.547 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42576 Sep 22 23:22:20.547 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42577 Sep 22 23:22:20.547 DEBG [0] It's time to notify for 326
42578 Sep 22 23:22:20.547 INFO Completion from [0] id:326 status:true
42579 Sep 22 23:22:20.547 INFO [327/752] Repair commands completed
42580 Sep 22 23:22:20.547 INFO Pop front: ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }, state: ClientData([New, New, New]) }
42581 Sep 22 23:22:20.547 INFO Sent repair work, now wait for resp
42582 Sep 22 23:22:20.547 INFO [0] received reconcile message
42583 Sep 22 23:22:20.547 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }, state: ClientData([InProgress, New, New]) }, : downstairs
42584 Sep 22 23:22:20.547 INFO [0] client ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }
42585 Sep 22 23:22:20.547 INFO [1] received reconcile message
42586 Sep 22 23:22:20.547 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42587 Sep 22 23:22:20.548 INFO [1] client ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }
42588 Sep 22 23:22:20.548 INFO [2] received reconcile message
42589 Sep 22 23:22:20.548 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42590 Sep 22 23:22:20.548 INFO [2] client ExtentReopen { repair_id: ReconciliationId(327), extent_id: 110 }
42591 Sep 22 23:22:20.548 DEBG 327 Reopen extent 110
42592 Sep 22 23:22:20.548 DEBG 327 Reopen extent 110
42593 Sep 22 23:22:20.549 DEBG 327 Reopen extent 110
42594 Sep 22 23:22:20.549 DEBG [2] It's time to notify for 327
42595 Sep 22 23:22:20.549 INFO Completion from [2] id:327 status:true
42596 Sep 22 23:22:20.549 INFO [328/752] Repair commands completed
42597 Sep 22 23:22:20.549 INFO Pop front: ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42598 Sep 22 23:22:20.550 INFO Sent repair work, now wait for resp
42599 Sep 22 23:22:20.550 INFO [0] received reconcile message
42600 Sep 22 23:22:20.550 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42601 Sep 22 23:22:20.550 INFO [0] client ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42602 Sep 22 23:22:20.550 INFO [1] received reconcile message
42603 Sep 22 23:22:20.550 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42604 Sep 22 23:22:20.550 INFO [1] client ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42605 Sep 22 23:22:20.550 INFO [2] received reconcile message
42606 Sep 22 23:22:20.550 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42607 Sep 22 23:22:20.550 INFO [2] client ExtentFlush { repair_id: ReconciliationId(328), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42608 Sep 22 23:22:20.550 DEBG 328 Flush extent 136 with f:2 g:2
42609 Sep 22 23:22:20.550 DEBG Flush just extent 136 with f:2 and g:2
42610 Sep 22 23:22:20.550 DEBG [1] It's time to notify for 328
42611 Sep 22 23:22:20.550 INFO Completion from [1] id:328 status:true
42612 Sep 22 23:22:20.550 INFO [329/752] Repair commands completed
42613 Sep 22 23:22:20.550 INFO Pop front: ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }, state: ClientData([New, New, New]) }
42614 Sep 22 23:22:20.550 INFO Sent repair work, now wait for resp
42615 Sep 22 23:22:20.550 INFO [0] received reconcile message
42616 Sep 22 23:22:20.550 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }, state: ClientData([InProgress, New, New]) }, : downstairs
42617 Sep 22 23:22:20.550 INFO [0] client ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }
42618 Sep 22 23:22:20.550 INFO [1] received reconcile message
42619 Sep 22 23:22:20.550 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42620 Sep 22 23:22:20.550 INFO [1] client ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }
42621 Sep 22 23:22:20.550 INFO [2] received reconcile message
42622 Sep 22 23:22:20.550 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42623 Sep 22 23:22:20.550 INFO [2] client ExtentClose { repair_id: ReconciliationId(329), extent_id: 136 }
42624 Sep 22 23:22:20.550 DEBG 329 Close extent 136
42625 Sep 22 23:22:20.551 DEBG 329 Close extent 136
42626 Sep 22 23:22:20.551 DEBG 329 Close extent 136
42627 Sep 22 23:22:20.551 DEBG [2] It's time to notify for 329
42628 Sep 22 23:22:20.551 INFO Completion from [2] id:329 status:true
42629 Sep 22 23:22:20.552 INFO [330/752] Repair commands completed
42630 Sep 22 23:22:20.552 INFO Pop front: ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42631 Sep 22 23:22:20.552 INFO Sent repair work, now wait for resp
42632 Sep 22 23:22:20.552 INFO [0] received reconcile message
42633 Sep 22 23:22:20.552 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42634 Sep 22 23:22:20.552 INFO [0] client ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42635 Sep 22 23:22:20.552 INFO [0] Sending repair request ReconciliationId(330)
42636 Sep 22 23:22:20.552 INFO [1] received reconcile message
42637 Sep 22 23:22:20.552 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42638 Sep 22 23:22:20.552 INFO [1] client ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42639 Sep 22 23:22:20.552 INFO [1] No action required ReconciliationId(330)
42640 Sep 22 23:22:20.552 INFO [2] received reconcile message
42641 Sep 22 23:22:20.552 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42642 Sep 22 23:22:20.552 INFO [2] client ExtentRepair { repair_id: ReconciliationId(330), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42643 Sep 22 23:22:20.552 INFO [2] No action required ReconciliationId(330)
42644 Sep 22 23:22:20.552 DEBG 330 Repair extent 136 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42645 Sep 22 23:22:20.552 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/088.copy"
42646 Sep 22 23:22:20.614 INFO accepted connection, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42647 Sep 22 23:22:20.615 TRCE incoming request, uri: /extent/136/files, method: GET, req_id: fa98f1b4-02b6-46c2-9a16-c22c578c7be0, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42648 Sep 22 23:22:20.615 INFO request completed, latency_us: 195, response_code: 200, uri: /extent/136/files, method: GET, req_id: fa98f1b4-02b6-46c2-9a16-c22c578c7be0, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42649 Sep 22 23:22:20.615 INFO eid:136 Found repair files: ["088", "088.db"]
42650 Sep 22 23:22:20.615 TRCE incoming request, uri: /newextent/136/data, method: GET, req_id: f4a3ccf3-9f30-49cb-a421-74a6afa13a96, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42651 Sep 22 23:22:20.616 INFO request completed, latency_us: 305, response_code: 200, uri: /newextent/136/data, method: GET, req_id: f4a3ccf3-9f30-49cb-a421-74a6afa13a96, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42652 Sep 22 23:22:20.620 TRCE incoming request, uri: /newextent/136/db, method: GET, req_id: 6fee2191-cd90-421c-82af-0faf89c5e3ce, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42653 Sep 22 23:22:20.621 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/136/db, method: GET, req_id: 6fee2191-cd90-421c-82af-0faf89c5e3ce, remote_addr: 127.0.0.1:63364, local_addr: 127.0.0.1:52864, task: repair
42654 Sep 22 23:22:20.622 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/088.copy" to "/tmp/downstairs-zrMnlo6G/00/000/088.replace"
42655 Sep 22 23:22:20.622 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42656 Sep 22 23:22:20.623 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/088.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42657 Sep 22 23:22:20.623 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/088"
42658 Sep 22 23:22:20.623 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/088.db"
42659 Sep 22 23:22:20.623 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42660 Sep 22 23:22:20.623 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/088.replace" to "/tmp/downstairs-zrMnlo6G/00/000/088.completed"
42661 Sep 22 23:22:20.623 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42662 Sep 22 23:22:20.623 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42663 Sep 22 23:22:20.623 DEBG [0] It's time to notify for 330
42664 Sep 22 23:22:20.623 INFO Completion from [0] id:330 status:true
42665 Sep 22 23:22:20.623 INFO [331/752] Repair commands completed
42666 Sep 22 23:22:20.623 INFO Pop front: ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }, state: ClientData([New, New, New]) }
42667 Sep 22 23:22:20.623 INFO Sent repair work, now wait for resp
42668 Sep 22 23:22:20.624 INFO [0] received reconcile message
42669 Sep 22 23:22:20.624 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }, state: ClientData([InProgress, New, New]) }, : downstairs
42670 Sep 22 23:22:20.624 INFO [0] client ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }
42671 Sep 22 23:22:20.624 INFO [1] received reconcile message
42672 Sep 22 23:22:20.624 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42673 Sep 22 23:22:20.624 INFO [1] client ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }
42674 Sep 22 23:22:20.624 INFO [2] received reconcile message
42675 Sep 22 23:22:20.624 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42676 Sep 22 23:22:20.624 INFO [2] client ExtentReopen { repair_id: ReconciliationId(331), extent_id: 136 }
42677 Sep 22 23:22:20.624 DEBG 331 Reopen extent 136
42678 Sep 22 23:22:20.624 DEBG 331 Reopen extent 136
42679 Sep 22 23:22:20.625 DEBG 331 Reopen extent 136
42680 Sep 22 23:22:20.626 DEBG [2] It's time to notify for 331
42681 Sep 22 23:22:20.626 INFO Completion from [2] id:331 status:true
42682 Sep 22 23:22:20.626 INFO [332/752] Repair commands completed
42683 Sep 22 23:22:20.626 INFO Pop front: ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42684 Sep 22 23:22:20.626 INFO Sent repair work, now wait for resp
42685 Sep 22 23:22:20.626 INFO [0] received reconcile message
42686 Sep 22 23:22:20.626 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42687 Sep 22 23:22:20.626 INFO [0] client ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42688 Sep 22 23:22:20.626 INFO [1] received reconcile message
42689 Sep 22 23:22:20.626 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42690 Sep 22 23:22:20.626 INFO [1] client ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42691 Sep 22 23:22:20.626 INFO [2] received reconcile message
42692 Sep 22 23:22:20.626 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42693 Sep 22 23:22:20.626 INFO [2] client ExtentFlush { repair_id: ReconciliationId(332), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42694 Sep 22 23:22:20.626 DEBG 332 Flush extent 61 with f:2 g:2
42695 Sep 22 23:22:20.626 DEBG Flush just extent 61 with f:2 and g:2
42696 Sep 22 23:22:20.626 DEBG [1] It's time to notify for 332
42697 Sep 22 23:22:20.626 INFO Completion from [1] id:332 status:true
42698 Sep 22 23:22:20.626 INFO [333/752] Repair commands completed
42699 Sep 22 23:22:20.626 INFO Pop front: ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }, state: ClientData([New, New, New]) }
42700 Sep 22 23:22:20.626 INFO Sent repair work, now wait for resp
42701 Sep 22 23:22:20.626 INFO [0] received reconcile message
42702 Sep 22 23:22:20.626 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }, state: ClientData([InProgress, New, New]) }, : downstairs
42703 Sep 22 23:22:20.626 INFO [0] client ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }
42704 Sep 22 23:22:20.626 INFO [1] received reconcile message
42705 Sep 22 23:22:20.626 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42706 Sep 22 23:22:20.626 INFO [1] client ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }
42707 Sep 22 23:22:20.626 INFO [2] received reconcile message
42708 Sep 22 23:22:20.626 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42709 Sep 22 23:22:20.626 INFO [2] client ExtentClose { repair_id: ReconciliationId(333), extent_id: 61 }
42710 Sep 22 23:22:20.627 DEBG 333 Close extent 61
42711 Sep 22 23:22:20.627 DEBG 333 Close extent 61
42712 Sep 22 23:22:20.627 DEBG 333 Close extent 61
42713 Sep 22 23:22:20.628 DEBG [2] It's time to notify for 333
42714 Sep 22 23:22:20.628 INFO Completion from [2] id:333 status:true
42715 Sep 22 23:22:20.628 INFO [334/752] Repair commands completed
42716 Sep 22 23:22:20.628 INFO Pop front: ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42717 Sep 22 23:22:20.628 INFO Sent repair work, now wait for resp
42718 Sep 22 23:22:20.628 INFO [0] received reconcile message
42719 Sep 22 23:22:20.628 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42720 Sep 22 23:22:20.628 INFO [0] client ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42721 Sep 22 23:22:20.628 INFO [0] Sending repair request ReconciliationId(334)
42722 Sep 22 23:22:20.628 INFO [1] received reconcile message
42723 Sep 22 23:22:20.628 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42724 Sep 22 23:22:20.628 INFO [1] client ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42725 Sep 22 23:22:20.628 INFO [1] No action required ReconciliationId(334)
42726 Sep 22 23:22:20.628 INFO [2] received reconcile message
42727 Sep 22 23:22:20.628 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42728 Sep 22 23:22:20.628 INFO [2] client ExtentRepair { repair_id: ReconciliationId(334), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42729 Sep 22 23:22:20.628 INFO [2] No action required ReconciliationId(334)
42730 Sep 22 23:22:20.628 DEBG 334 Repair extent 61 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42731 Sep 22 23:22:20.628 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/03D.copy"
42732 Sep 22 23:22:20.694 INFO accepted connection, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42733 Sep 22 23:22:20.694 TRCE incoming request, uri: /extent/61/files, method: GET, req_id: 2ed5c2a6-37d2-4d0d-a854-29afc6e81f41, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42734 Sep 22 23:22:20.694 INFO request completed, latency_us: 228, response_code: 200, uri: /extent/61/files, method: GET, req_id: 2ed5c2a6-37d2-4d0d-a854-29afc6e81f41, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42735 Sep 22 23:22:20.695 INFO eid:61 Found repair files: ["03D", "03D.db"]
42736 Sep 22 23:22:20.695 TRCE incoming request, uri: /newextent/61/data, method: GET, req_id: bb61d23f-805b-46bf-9dce-f4d981c96af7, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42737 Sep 22 23:22:20.695 INFO request completed, latency_us: 344, response_code: 200, uri: /newextent/61/data, method: GET, req_id: bb61d23f-805b-46bf-9dce-f4d981c96af7, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42738 Sep 22 23:22:20.700 TRCE incoming request, uri: /newextent/61/db, method: GET, req_id: bac254bc-c714-4bea-beb4-a4b6a3ff9746, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42739 Sep 22 23:22:20.700 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/61/db, method: GET, req_id: bac254bc-c714-4bea-beb4-a4b6a3ff9746, remote_addr: 127.0.0.1:51628, local_addr: 127.0.0.1:52864, task: repair
42740 Sep 22 23:22:20.702 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/03D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/03D.replace"
42741 Sep 22 23:22:20.702 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42742 Sep 22 23:22:20.702 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/03D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42743 Sep 22 23:22:20.703 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03D"
42744 Sep 22 23:22:20.703 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03D.db"
42745 Sep 22 23:22:20.703 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42746 Sep 22 23:22:20.703 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/03D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/03D.completed"
42747 Sep 22 23:22:20.703 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42748 Sep 22 23:22:20.703 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42749 Sep 22 23:22:20.703 DEBG [0] It's time to notify for 334
42750 Sep 22 23:22:20.703 INFO Completion from [0] id:334 status:true
42751 Sep 22 23:22:20.703 INFO [335/752] Repair commands completed
42752 Sep 22 23:22:20.703 INFO Pop front: ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }, state: ClientData([New, New, New]) }
42753 Sep 22 23:22:20.703 INFO Sent repair work, now wait for resp
42754 Sep 22 23:22:20.703 INFO [0] received reconcile message
42755 Sep 22 23:22:20.703 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }, state: ClientData([InProgress, New, New]) }, : downstairs
42756 Sep 22 23:22:20.703 INFO [0] client ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }
42757 Sep 22 23:22:20.704 INFO [1] received reconcile message
42758 Sep 22 23:22:20.704 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42759 Sep 22 23:22:20.704 INFO [1] client ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }
42760 Sep 22 23:22:20.704 INFO [2] received reconcile message
42761 Sep 22 23:22:20.704 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42762 Sep 22 23:22:20.704 INFO [2] client ExtentReopen { repair_id: ReconciliationId(335), extent_id: 61 }
42763 Sep 22 23:22:20.704 DEBG 335 Reopen extent 61
42764 Sep 22 23:22:20.704 DEBG 335 Reopen extent 61
42765 Sep 22 23:22:20.705 DEBG 335 Reopen extent 61
42766 Sep 22 23:22:20.706 DEBG [2] It's time to notify for 335
42767 Sep 22 23:22:20.706 INFO Completion from [2] id:335 status:true
42768 Sep 22 23:22:20.706 INFO [336/752] Repair commands completed
42769 Sep 22 23:22:20.706 INFO Pop front: ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42770 Sep 22 23:22:20.706 INFO Sent repair work, now wait for resp
42771 Sep 22 23:22:20.706 INFO [0] received reconcile message
42772 Sep 22 23:22:20.706 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42773 Sep 22 23:22:20.706 INFO [0] client ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42774 Sep 22 23:22:20.706 INFO [1] received reconcile message
42775 Sep 22 23:22:20.706 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42776 Sep 22 23:22:20.706 INFO [1] client ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42777 Sep 22 23:22:20.706 INFO [2] received reconcile message
42778 Sep 22 23:22:20.706 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42779 Sep 22 23:22:20.706 INFO [2] client ExtentFlush { repair_id: ReconciliationId(336), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42780 Sep 22 23:22:20.706 DEBG 336 Flush extent 119 with f:2 g:2
42781 Sep 22 23:22:20.706 DEBG Flush just extent 119 with f:2 and g:2
42782 Sep 22 23:22:20.706 DEBG [1] It's time to notify for 336
42783 Sep 22 23:22:20.706 INFO Completion from [1] id:336 status:true
42784 Sep 22 23:22:20.706 INFO [337/752] Repair commands completed
42785 Sep 22 23:22:20.706 INFO Pop front: ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }, state: ClientData([New, New, New]) }
42786 Sep 22 23:22:20.706 INFO Sent repair work, now wait for resp
42787 Sep 22 23:22:20.706 INFO [0] received reconcile message
42788 Sep 22 23:22:20.706 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }, state: ClientData([InProgress, New, New]) }, : downstairs
42789 Sep 22 23:22:20.706 INFO [0] client ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }
42790 Sep 22 23:22:20.706 INFO [1] received reconcile message
42791 Sep 22 23:22:20.706 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42792 Sep 22 23:22:20.706 INFO [1] client ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }
42793 Sep 22 23:22:20.706 INFO [2] received reconcile message
42794 Sep 22 23:22:20.706 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42795 Sep 22 23:22:20.707 INFO [2] client ExtentClose { repair_id: ReconciliationId(337), extent_id: 119 }
42796 Sep 22 23:22:20.707 DEBG 337 Close extent 119
42797 Sep 22 23:22:20.707 DEBG 337 Close extent 119
42798 Sep 22 23:22:20.707 DEBG 337 Close extent 119
42799 Sep 22 23:22:20.708 DEBG [2] It's time to notify for 337
42800 Sep 22 23:22:20.708 INFO Completion from [2] id:337 status:true
42801 Sep 22 23:22:20.708 INFO [338/752] Repair commands completed
42802 Sep 22 23:22:20.708 INFO Pop front: ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42803 Sep 22 23:22:20.708 INFO Sent repair work, now wait for resp
42804 Sep 22 23:22:20.708 INFO [0] received reconcile message
42805 Sep 22 23:22:20.708 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42806 Sep 22 23:22:20.708 INFO [0] client ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42807 Sep 22 23:22:20.708 INFO [0] Sending repair request ReconciliationId(338)
42808 Sep 22 23:22:20.708 INFO [1] received reconcile message
42809 Sep 22 23:22:20.708 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42810 Sep 22 23:22:20.708 INFO [1] client ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42811 Sep 22 23:22:20.708 INFO [1] No action required ReconciliationId(338)
42812 Sep 22 23:22:20.708 INFO [2] received reconcile message
42813 Sep 22 23:22:20.708 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42814 Sep 22 23:22:20.708 INFO [2] client ExtentRepair { repair_id: ReconciliationId(338), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42815 Sep 22 23:22:20.708 INFO [2] No action required ReconciliationId(338)
42816 Sep 22 23:22:20.708 DEBG 338 Repair extent 119 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42817 Sep 22 23:22:20.708 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/077.copy"
42818 Sep 22 23:22:20.772 INFO accepted connection, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42819 Sep 22 23:22:20.772 TRCE incoming request, uri: /extent/119/files, method: GET, req_id: a3003b4f-a27b-453f-a3ce-cf6ee617df66, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42820 Sep 22 23:22:20.772 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/119/files, method: GET, req_id: a3003b4f-a27b-453f-a3ce-cf6ee617df66, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42821 Sep 22 23:22:20.772 INFO eid:119 Found repair files: ["077", "077.db"]
42822 Sep 22 23:22:20.773 TRCE incoming request, uri: /newextent/119/data, method: GET, req_id: da6f6d79-c613-49ed-83b1-1b1f95301334, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42823 Sep 22 23:22:20.773 INFO request completed, latency_us: 263, response_code: 200, uri: /newextent/119/data, method: GET, req_id: da6f6d79-c613-49ed-83b1-1b1f95301334, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42824 Sep 22 23:22:20.778 TRCE incoming request, uri: /newextent/119/db, method: GET, req_id: 70454771-459b-4510-8e83-c2073313f6b7, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42825 Sep 22 23:22:20.778 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/119/db, method: GET, req_id: 70454771-459b-4510-8e83-c2073313f6b7, remote_addr: 127.0.0.1:57841, local_addr: 127.0.0.1:52864, task: repair
42826 Sep 22 23:22:20.779 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/077.copy" to "/tmp/downstairs-zrMnlo6G/00/000/077.replace"
42827 Sep 22 23:22:20.779 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42828 Sep 22 23:22:20.780 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/077.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42829 Sep 22 23:22:20.780 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/077"
42830 Sep 22 23:22:20.780 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/077.db"
42831 Sep 22 23:22:20.781 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42832 Sep 22 23:22:20.781 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/077.replace" to "/tmp/downstairs-zrMnlo6G/00/000/077.completed"
42833 Sep 22 23:22:20.781 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42834 Sep 22 23:22:20.781 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42835 Sep 22 23:22:20.781 DEBG [0] It's time to notify for 338
42836 Sep 22 23:22:20.781 INFO Completion from [0] id:338 status:true
42837 Sep 22 23:22:20.781 INFO [339/752] Repair commands completed
42838 Sep 22 23:22:20.781 INFO Pop front: ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }, state: ClientData([New, New, New]) }
42839 Sep 22 23:22:20.781 INFO Sent repair work, now wait for resp
42840 Sep 22 23:22:20.781 INFO [0] received reconcile message
42841 Sep 22 23:22:20.781 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }, state: ClientData([InProgress, New, New]) }, : downstairs
42842 Sep 22 23:22:20.781 INFO [0] client ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }
42843 Sep 22 23:22:20.781 INFO [1] received reconcile message
42844 Sep 22 23:22:20.781 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42845 Sep 22 23:22:20.781 INFO [1] client ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }
42846 Sep 22 23:22:20.781 INFO [2] received reconcile message
42847 Sep 22 23:22:20.781 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42848 Sep 22 23:22:20.781 INFO [2] client ExtentReopen { repair_id: ReconciliationId(339), extent_id: 119 }
42849 Sep 22 23:22:20.781 DEBG 339 Reopen extent 119
42850 Sep 22 23:22:20.782 DEBG 339 Reopen extent 119
42851 Sep 22 23:22:20.783 DEBG 339 Reopen extent 119
42852 Sep 22 23:22:20.783 DEBG [2] It's time to notify for 339
42853 Sep 22 23:22:20.783 INFO Completion from [2] id:339 status:true
42854 Sep 22 23:22:20.783 INFO [340/752] Repair commands completed
42855 Sep 22 23:22:20.783 INFO Pop front: ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42856 Sep 22 23:22:20.783 INFO Sent repair work, now wait for resp
42857 Sep 22 23:22:20.783 INFO [0] received reconcile message
42858 Sep 22 23:22:20.783 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42859 Sep 22 23:22:20.783 INFO [0] client ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42860 Sep 22 23:22:20.783 INFO [1] received reconcile message
42861 Sep 22 23:22:20.783 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42862 Sep 22 23:22:20.783 INFO [1] client ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42863 Sep 22 23:22:20.783 INFO [2] received reconcile message
42864 Sep 22 23:22:20.783 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42865 Sep 22 23:22:20.783 INFO [2] client ExtentFlush { repair_id: ReconciliationId(340), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42866 Sep 22 23:22:20.784 DEBG 340 Flush extent 30 with f:2 g:2
42867 Sep 22 23:22:20.784 DEBG Flush just extent 30 with f:2 and g:2
42868 Sep 22 23:22:20.784 DEBG [1] It's time to notify for 340
42869 Sep 22 23:22:20.784 INFO Completion from [1] id:340 status:true
42870 Sep 22 23:22:20.784 INFO [341/752] Repair commands completed
42871 Sep 22 23:22:20.784 INFO Pop front: ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }, state: ClientData([New, New, New]) }
42872 Sep 22 23:22:20.784 INFO Sent repair work, now wait for resp
42873 Sep 22 23:22:20.784 INFO [0] received reconcile message
42874 Sep 22 23:22:20.784 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }, state: ClientData([InProgress, New, New]) }, : downstairs
42875 Sep 22 23:22:20.784 INFO [0] client ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }
42876 Sep 22 23:22:20.784 INFO [1] received reconcile message
42877 Sep 22 23:22:20.784 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42878 Sep 22 23:22:20.784 INFO [1] client ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }
42879 Sep 22 23:22:20.784 INFO [2] received reconcile message
42880 Sep 22 23:22:20.784 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42881 Sep 22 23:22:20.784 INFO [2] client ExtentClose { repair_id: ReconciliationId(341), extent_id: 30 }
42882 Sep 22 23:22:20.784 DEBG 341 Close extent 30
42883 Sep 22 23:22:20.784 DEBG 341 Close extent 30
42884 Sep 22 23:22:20.785 DEBG 341 Close extent 30
42885 Sep 22 23:22:20.785 DEBG [2] It's time to notify for 341
42886 Sep 22 23:22:20.785 INFO Completion from [2] id:341 status:true
42887 Sep 22 23:22:20.785 INFO [342/752] Repair commands completed
42888 Sep 22 23:22:20.785 INFO Pop front: ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42889 Sep 22 23:22:20.785 INFO Sent repair work, now wait for resp
42890 Sep 22 23:22:20.785 INFO [0] received reconcile message
42891 Sep 22 23:22:20.785 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42892 Sep 22 23:22:20.785 INFO [0] client ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42893 Sep 22 23:22:20.785 INFO [0] Sending repair request ReconciliationId(342)
42894 Sep 22 23:22:20.785 INFO [1] received reconcile message
42895 Sep 22 23:22:20.785 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42896 Sep 22 23:22:20.785 INFO [1] client ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42897 Sep 22 23:22:20.785 INFO [1] No action required ReconciliationId(342)
42898 Sep 22 23:22:20.785 INFO [2] received reconcile message
42899 Sep 22 23:22:20.785 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42900 Sep 22 23:22:20.786 INFO [2] client ExtentRepair { repair_id: ReconciliationId(342), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42901 Sep 22 23:22:20.786 INFO [2] No action required ReconciliationId(342)
42902 Sep 22 23:22:20.786 DEBG 342 Repair extent 30 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42903 Sep 22 23:22:20.786 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/01E.copy"
42904 Sep 22 23:22:20.839 ERRO [2] job id 1081 saw error GenericError("test error")
42905 Sep 22 23:22:20.844 DEBG up_ds_listen was notified
42906 Sep 22 23:22:20.844 DEBG up_ds_listen process 1082
42907 Sep 22 23:22:20.844 DEBG [A] ack job 1082:83, : downstairs
42908 Sep 22 23:22:20.844 DEBG up_ds_listen checked 1 jobs, back to waiting
42909 Sep 22 23:22:20.850 INFO accepted connection, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42910 Sep 22 23:22:20.850 TRCE incoming request, uri: /extent/30/files, method: GET, req_id: 42aedd9b-da3a-446d-afc9-ca44553ea593, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42911 Sep 22 23:22:20.850 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/30/files, method: GET, req_id: 42aedd9b-da3a-446d-afc9-ca44553ea593, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42912 Sep 22 23:22:20.850 INFO eid:30 Found repair files: ["01E", "01E.db"]
42913 Sep 22 23:22:20.851 TRCE incoming request, uri: /newextent/30/data, method: GET, req_id: cd8bc92a-65d6-414e-81db-c90b8063e733, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42914 Sep 22 23:22:20.851 DEBG IO Flush 1084 has deps [JobId(1083), JobId(1082)]
42915 Sep 22 23:22:20.851 INFO request completed, latency_us: 363, response_code: 200, uri: /newextent/30/data, method: GET, req_id: cd8bc92a-65d6-414e-81db-c90b8063e733, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42916 Sep 22 23:22:20.853 INFO [lossy] sleeping 1 second
42917 Sep 22 23:22:20.856 TRCE incoming request, uri: /newextent/30/db, method: GET, req_id: 33618b72-82e5-4f7a-866f-ebebf093c319, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42918 Sep 22 23:22:20.856 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/30/db, method: GET, req_id: 33618b72-82e5-4f7a-866f-ebebf093c319, remote_addr: 127.0.0.1:58426, local_addr: 127.0.0.1:52864, task: repair
42919 Sep 22 23:22:20.857 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/01E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/01E.replace"
42920 Sep 22 23:22:20.857 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42921 Sep 22 23:22:20.858 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/01E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
42922 Sep 22 23:22:20.858 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01E"
42923 Sep 22 23:22:20.858 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01E.db"
42924 Sep 22 23:22:20.859 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42925 Sep 22 23:22:20.859 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/01E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/01E.completed"
42926 Sep 22 23:22:20.859 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42927 Sep 22 23:22:20.859 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
42928 Sep 22 23:22:20.859 DEBG [0] It's time to notify for 342
42929 Sep 22 23:22:20.859 INFO Completion from [0] id:342 status:true
42930 Sep 22 23:22:20.859 INFO [343/752] Repair commands completed
42931 Sep 22 23:22:20.859 INFO Pop front: ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }, state: ClientData([New, New, New]) }
42932 Sep 22 23:22:20.859 INFO Sent repair work, now wait for resp
42933 Sep 22 23:22:20.859 INFO [0] received reconcile message
42934 Sep 22 23:22:20.859 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }, state: ClientData([InProgress, New, New]) }, : downstairs
42935 Sep 22 23:22:20.859 INFO [0] client ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }
42936 Sep 22 23:22:20.859 INFO [1] received reconcile message
42937 Sep 22 23:22:20.859 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42938 Sep 22 23:22:20.859 INFO [1] client ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }
42939 Sep 22 23:22:20.859 INFO [2] received reconcile message
42940 Sep 22 23:22:20.859 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42941 Sep 22 23:22:20.859 INFO [2] client ExtentReopen { repair_id: ReconciliationId(343), extent_id: 30 }
42942 Sep 22 23:22:20.859 DEBG 343 Reopen extent 30
42943 Sep 22 23:22:20.860 DEBG 343 Reopen extent 30
42944 Sep 22 23:22:20.861 DEBG 343 Reopen extent 30
42945 Sep 22 23:22:20.861 DEBG [2] It's time to notify for 343
42946 Sep 22 23:22:20.861 INFO Completion from [2] id:343 status:true
42947 Sep 22 23:22:20.861 INFO [344/752] Repair commands completed
42948 Sep 22 23:22:20.861 INFO Pop front: ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42949 Sep 22 23:22:20.861 INFO Sent repair work, now wait for resp
42950 Sep 22 23:22:20.861 INFO [0] received reconcile message
42951 Sep 22 23:22:20.861 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42952 Sep 22 23:22:20.861 INFO [0] client ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42953 Sep 22 23:22:20.861 INFO [1] received reconcile message
42954 Sep 22 23:22:20.861 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42955 Sep 22 23:22:20.861 INFO [1] client ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42956 Sep 22 23:22:20.861 INFO [2] received reconcile message
42957 Sep 22 23:22:20.861 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42958 Sep 22 23:22:20.861 INFO [2] client ExtentFlush { repair_id: ReconciliationId(344), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42959 Sep 22 23:22:20.862 DEBG 344 Flush extent 46 with f:2 g:2
42960 Sep 22 23:22:20.862 DEBG Flush just extent 46 with f:2 and g:2
42961 Sep 22 23:22:20.862 DEBG [1] It's time to notify for 344
42962 Sep 22 23:22:20.862 INFO Completion from [1] id:344 status:true
42963 Sep 22 23:22:20.862 INFO [345/752] Repair commands completed
42964 Sep 22 23:22:20.862 INFO Pop front: ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }, state: ClientData([New, New, New]) }
42965 Sep 22 23:22:20.862 INFO Sent repair work, now wait for resp
42966 Sep 22 23:22:20.862 INFO [0] received reconcile message
42967 Sep 22 23:22:20.862 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }, state: ClientData([InProgress, New, New]) }, : downstairs
42968 Sep 22 23:22:20.862 INFO [0] client ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }
42969 Sep 22 23:22:20.862 INFO [1] received reconcile message
42970 Sep 22 23:22:20.862 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42971 Sep 22 23:22:20.862 INFO [1] client ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }
42972 Sep 22 23:22:20.862 INFO [2] received reconcile message
42973 Sep 22 23:22:20.862 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42974 Sep 22 23:22:20.862 INFO [2] client ExtentClose { repair_id: ReconciliationId(345), extent_id: 46 }
42975 Sep 22 23:22:20.862 DEBG 345 Close extent 46
42976 Sep 22 23:22:20.862 DEBG 345 Close extent 46
42977 Sep 22 23:22:20.863 DEBG 345 Close extent 46
42978 Sep 22 23:22:20.863 DEBG [2] It's time to notify for 345
42979 Sep 22 23:22:20.863 INFO Completion from [2] id:345 status:true
42980 Sep 22 23:22:20.863 INFO [346/752] Repair commands completed
42981 Sep 22 23:22:20.863 INFO Pop front: ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42982 Sep 22 23:22:20.863 INFO Sent repair work, now wait for resp
42983 Sep 22 23:22:20.863 INFO [0] received reconcile message
42984 Sep 22 23:22:20.863 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42985 Sep 22 23:22:20.863 INFO [0] client ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42986 Sep 22 23:22:20.863 INFO [0] Sending repair request ReconciliationId(346)
42987 Sep 22 23:22:20.863 INFO [1] received reconcile message
42988 Sep 22 23:22:20.863 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42989 Sep 22 23:22:20.863 INFO [1] client ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42990 Sep 22 23:22:20.863 INFO [1] No action required ReconciliationId(346)
42991 Sep 22 23:22:20.863 INFO [2] received reconcile message
42992 Sep 22 23:22:20.863 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42993 Sep 22 23:22:20.864 INFO [2] client ExtentRepair { repair_id: ReconciliationId(346), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
42994 Sep 22 23:22:20.864 INFO [2] No action required ReconciliationId(346)
42995 Sep 22 23:22:20.864 DEBG 346 Repair extent 46 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
42996 Sep 22 23:22:20.864 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/02E.copy"
42997 Sep 22 23:22:20.930 INFO accepted connection, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
42998 Sep 22 23:22:20.930 TRCE incoming request, uri: /extent/46/files, method: GET, req_id: 52fa3f60-9101-408a-ae8c-2e8375d14443, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
42999 Sep 22 23:22:20.930 INFO request completed, latency_us: 276, response_code: 200, uri: /extent/46/files, method: GET, req_id: 52fa3f60-9101-408a-ae8c-2e8375d14443, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
43000 Sep 22 23:22:20.930 INFO eid:46 Found repair files: ["02E", "02E.db"]
43001 Sep 22 23:22:20.931 TRCE incoming request, uri: /newextent/46/data, method: GET, req_id: d134f810-e211-4f22-a384-f33489afcd36, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
43002 Sep 22 23:22:20.931 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/46/data, method: GET, req_id: d134f810-e211-4f22-a384-f33489afcd36, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
43003 Sep 22 23:22:20.936 TRCE incoming request, uri: /newextent/46/db, method: GET, req_id: 31ff714d-d9b8-425c-8cf8-0c0b8301bc57, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
43004 Sep 22 23:22:20.936 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/46/db, method: GET, req_id: 31ff714d-d9b8-425c-8cf8-0c0b8301bc57, remote_addr: 127.0.0.1:62231, local_addr: 127.0.0.1:52864, task: repair
43005 Sep 22 23:22:20.938 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/02E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/02E.replace"
43006 Sep 22 23:22:20.938 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43007 Sep 22 23:22:20.939 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/02E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43008 Sep 22 23:22:20.939 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02E"
43009 Sep 22 23:22:20.939 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02E.db"
43010 Sep 22 23:22:20.939 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43011 Sep 22 23:22:20.939 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/02E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/02E.completed"
43012 Sep 22 23:22:20.939 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43013 Sep 22 23:22:20.939 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43014 Sep 22 23:22:20.940 DEBG [0] It's time to notify for 346
43015 Sep 22 23:22:20.940 INFO Completion from [0] id:346 status:true
43016 Sep 22 23:22:20.940 INFO [347/752] Repair commands completed
43017 Sep 22 23:22:20.940 INFO Pop front: ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }, state: ClientData([New, New, New]) }
43018 Sep 22 23:22:20.940 INFO Sent repair work, now wait for resp
43019 Sep 22 23:22:20.940 INFO [0] received reconcile message
43020 Sep 22 23:22:20.940 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }, state: ClientData([InProgress, New, New]) }, : downstairs
43021 Sep 22 23:22:20.940 INFO [0] client ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }
43022 Sep 22 23:22:20.940 INFO [1] received reconcile message
43023 Sep 22 23:22:20.940 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43024 Sep 22 23:22:20.940 INFO [1] client ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }
43025 Sep 22 23:22:20.940 INFO [2] received reconcile message
43026 Sep 22 23:22:20.940 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43027 Sep 22 23:22:20.940 INFO [2] client ExtentReopen { repair_id: ReconciliationId(347), extent_id: 46 }
43028 Sep 22 23:22:20.940 DEBG 347 Reopen extent 46
43029 Sep 22 23:22:20.941 DEBG 347 Reopen extent 46
43030 Sep 22 23:22:20.942 DEBG 347 Reopen extent 46
43031 Sep 22 23:22:20.942 DEBG [2] It's time to notify for 347
43032 Sep 22 23:22:20.942 INFO Completion from [2] id:347 status:true
43033 Sep 22 23:22:20.942 INFO [348/752] Repair commands completed
43034 Sep 22 23:22:20.942 INFO Pop front: ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43035 Sep 22 23:22:20.942 INFO Sent repair work, now wait for resp
43036 Sep 22 23:22:20.943 INFO [0] received reconcile message
43037 Sep 22 23:22:20.943 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43038 Sep 22 23:22:20.943 INFO [0] client ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43039 Sep 22 23:22:20.943 INFO [1] received reconcile message
43040 Sep 22 23:22:20.943 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43041 Sep 22 23:22:20.943 INFO [1] client ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43042 Sep 22 23:22:20.943 INFO [2] received reconcile message
43043 Sep 22 23:22:20.943 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43044 Sep 22 23:22:20.943 INFO [2] client ExtentFlush { repair_id: ReconciliationId(348), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43045 Sep 22 23:22:20.943 DEBG 348 Flush extent 127 with f:2 g:2
43046 Sep 22 23:22:20.943 DEBG Flush just extent 127 with f:2 and g:2
43047 Sep 22 23:22:20.943 DEBG [1] It's time to notify for 348
43048 Sep 22 23:22:20.943 INFO Completion from [1] id:348 status:true
43049 Sep 22 23:22:20.943 INFO [349/752] Repair commands completed
43050 Sep 22 23:22:20.943 INFO Pop front: ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }, state: ClientData([New, New, New]) }
43051 Sep 22 23:22:20.943 INFO Sent repair work, now wait for resp
43052 Sep 22 23:22:20.943 INFO [0] received reconcile message
43053 Sep 22 23:22:20.943 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }, state: ClientData([InProgress, New, New]) }, : downstairs
43054 Sep 22 23:22:20.943 INFO [0] client ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }
43055 Sep 22 23:22:20.943 INFO [1] received reconcile message
43056 Sep 22 23:22:20.943 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43057 Sep 22 23:22:20.943 INFO [1] client ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }
43058 Sep 22 23:22:20.943 INFO [2] received reconcile message
43059 Sep 22 23:22:20.943 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43060 Sep 22 23:22:20.943 INFO [2] client ExtentClose { repair_id: ReconciliationId(349), extent_id: 127 }
43061 Sep 22 23:22:20.943 DEBG 349 Close extent 127
43062 Sep 22 23:22:20.944 DEBG 349 Close extent 127
43063 Sep 22 23:22:20.944 DEBG 349 Close extent 127
43064 Sep 22 23:22:20.944 DEBG [2] It's time to notify for 349
43065 Sep 22 23:22:20.944 INFO Completion from [2] id:349 status:true
43066 Sep 22 23:22:20.944 INFO [350/752] Repair commands completed
43067 Sep 22 23:22:20.944 INFO Pop front: ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43068 Sep 22 23:22:20.945 INFO Sent repair work, now wait for resp
43069 Sep 22 23:22:20.945 INFO [0] received reconcile message
43070 Sep 22 23:22:20.945 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43071 Sep 22 23:22:20.945 INFO [0] client ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43072 Sep 22 23:22:20.945 INFO [0] Sending repair request ReconciliationId(350)
43073 Sep 22 23:22:20.945 INFO [1] received reconcile message
43074 Sep 22 23:22:20.945 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43075 Sep 22 23:22:20.945 INFO [1] client ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43076 Sep 22 23:22:20.945 INFO [1] No action required ReconciliationId(350)
43077 Sep 22 23:22:20.945 INFO [2] received reconcile message
43078 Sep 22 23:22:20.945 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43079 Sep 22 23:22:20.945 INFO [2] client ExtentRepair { repair_id: ReconciliationId(350), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43080 Sep 22 23:22:20.945 INFO [2] No action required ReconciliationId(350)
43081 Sep 22 23:22:20.945 DEBG 350 Repair extent 127 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43082 Sep 22 23:22:20.945 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/07F.copy"
43083 Sep 22 23:22:21.011 INFO accepted connection, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43084 Sep 22 23:22:21.012 TRCE incoming request, uri: /extent/127/files, method: GET, req_id: 46989101-851b-4e66-b25a-724ac843852a, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43085 Sep 22 23:22:21.012 INFO request completed, latency_us: 296, response_code: 200, uri: /extent/127/files, method: GET, req_id: 46989101-851b-4e66-b25a-724ac843852a, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43086 Sep 22 23:22:21.012 INFO eid:127 Found repair files: ["07F", "07F.db"]
43087 Sep 22 23:22:21.013 TRCE incoming request, uri: /newextent/127/data, method: GET, req_id: eed44035-bfdb-46b8-85e3-330fb507ba9a, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43088 Sep 22 23:22:21.013 INFO request completed, latency_us: 359, response_code: 200, uri: /newextent/127/data, method: GET, req_id: eed44035-bfdb-46b8-85e3-330fb507ba9a, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43089 Sep 22 23:22:21.018 TRCE incoming request, uri: /newextent/127/db, method: GET, req_id: 7260fb59-b8f9-4d8c-84e3-237cc7565e46, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43090 Sep 22 23:22:21.018 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/127/db, method: GET, req_id: 7260fb59-b8f9-4d8c-84e3-237cc7565e46, remote_addr: 127.0.0.1:56439, local_addr: 127.0.0.1:52864, task: repair
43091 Sep 22 23:22:21.019 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/07F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/07F.replace"
43092 Sep 22 23:22:21.019 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43093 Sep 22 23:22:21.020 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/07F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43094 Sep 22 23:22:21.021 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07F"
43095 Sep 22 23:22:21.021 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07F.db"
43096 Sep 22 23:22:21.021 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43097 Sep 22 23:22:21.021 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/07F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/07F.completed"
43098 Sep 22 23:22:21.021 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43099 Sep 22 23:22:21.021 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43100 Sep 22 23:22:21.021 DEBG [0] It's time to notify for 350
43101 Sep 22 23:22:21.021 INFO Completion from [0] id:350 status:true
43102 Sep 22 23:22:21.021 INFO [351/752] Repair commands completed
43103 Sep 22 23:22:21.021 INFO Pop front: ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }, state: ClientData([New, New, New]) }
43104 Sep 22 23:22:21.021 INFO Sent repair work, now wait for resp
43105 Sep 22 23:22:21.021 INFO [0] received reconcile message
43106 Sep 22 23:22:21.021 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }, state: ClientData([InProgress, New, New]) }, : downstairs
43107 Sep 22 23:22:21.022 INFO [0] client ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }
43108 Sep 22 23:22:21.022 INFO [1] received reconcile message
43109 Sep 22 23:22:21.022 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43110 Sep 22 23:22:21.022 INFO [1] client ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }
43111 Sep 22 23:22:21.022 INFO [2] received reconcile message
43112 Sep 22 23:22:21.022 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43113 Sep 22 23:22:21.022 INFO [2] client ExtentReopen { repair_id: ReconciliationId(351), extent_id: 127 }
43114 Sep 22 23:22:21.022 DEBG 351 Reopen extent 127
43115 Sep 22 23:22:21.023 DEBG 351 Reopen extent 127
43116 Sep 22 23:22:21.023 DEBG 351 Reopen extent 127
43117 Sep 22 23:22:21.024 DEBG [2] It's time to notify for 351
43118 Sep 22 23:22:21.024 INFO Completion from [2] id:351 status:true
43119 Sep 22 23:22:21.024 INFO [352/752] Repair commands completed
43120 Sep 22 23:22:21.024 INFO Pop front: ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43121 Sep 22 23:22:21.024 INFO Sent repair work, now wait for resp
43122 Sep 22 23:22:21.024 INFO [0] received reconcile message
43123 Sep 22 23:22:21.024 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43124 Sep 22 23:22:21.024 INFO [0] client ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43125 Sep 22 23:22:21.024 INFO [1] received reconcile message
43126 Sep 22 23:22:21.024 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43127 Sep 22 23:22:21.024 INFO [1] client ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43128 Sep 22 23:22:21.024 INFO [2] received reconcile message
43129 Sep 22 23:22:21.024 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43130 Sep 22 23:22:21.024 INFO [2] client ExtentFlush { repair_id: ReconciliationId(352), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43131 Sep 22 23:22:21.024 DEBG 352 Flush extent 99 with f:2 g:2
43132 Sep 22 23:22:21.024 DEBG Flush just extent 99 with f:2 and g:2
43133 Sep 22 23:22:21.024 DEBG [1] It's time to notify for 352
43134 Sep 22 23:22:21.024 INFO Completion from [1] id:352 status:true
43135 Sep 22 23:22:21.024 INFO [353/752] Repair commands completed
43136 Sep 22 23:22:21.024 INFO Pop front: ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }, state: ClientData([New, New, New]) }
43137 Sep 22 23:22:21.025 INFO Sent repair work, now wait for resp
43138 Sep 22 23:22:21.025 INFO [0] received reconcile message
43139 Sep 22 23:22:21.025 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }, state: ClientData([InProgress, New, New]) }, : downstairs
43140 Sep 22 23:22:21.025 INFO [0] client ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }
43141 Sep 22 23:22:21.025 INFO [1] received reconcile message
43142 Sep 22 23:22:21.025 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43143 Sep 22 23:22:21.025 INFO [1] client ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }
43144 Sep 22 23:22:21.025 INFO [2] received reconcile message
43145 Sep 22 23:22:21.025 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43146 Sep 22 23:22:21.025 INFO [2] client ExtentClose { repair_id: ReconciliationId(353), extent_id: 99 }
43147 Sep 22 23:22:21.025 DEBG 353 Close extent 99
43148 Sep 22 23:22:21.025 DEBG 353 Close extent 99
43149 Sep 22 23:22:21.025 DEBG 353 Close extent 99
43150 Sep 22 23:22:21.026 DEBG [2] It's time to notify for 353
43151 Sep 22 23:22:21.026 INFO Completion from [2] id:353 status:true
43152 Sep 22 23:22:21.026 INFO [354/752] Repair commands completed
43153 Sep 22 23:22:21.026 INFO Pop front: ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43154 Sep 22 23:22:21.026 INFO Sent repair work, now wait for resp
43155 Sep 22 23:22:21.026 INFO [0] received reconcile message
43156 Sep 22 23:22:21.026 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43157 Sep 22 23:22:21.026 INFO [0] client ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43158 Sep 22 23:22:21.026 INFO [0] Sending repair request ReconciliationId(354)
43159 Sep 22 23:22:21.026 INFO [1] received reconcile message
43160 Sep 22 23:22:21.026 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43161 Sep 22 23:22:21.026 INFO [1] client ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43162 Sep 22 23:22:21.026 INFO [1] No action required ReconciliationId(354)
43163 Sep 22 23:22:21.026 INFO [2] received reconcile message
43164 Sep 22 23:22:21.026 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43165 Sep 22 23:22:21.026 INFO [2] client ExtentRepair { repair_id: ReconciliationId(354), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43166 Sep 22 23:22:21.026 INFO [2] No action required ReconciliationId(354)
43167 Sep 22 23:22:21.026 DEBG 354 Repair extent 99 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43168 Sep 22 23:22:21.026 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/063.copy"
43169 Sep 22 23:22:21.092 INFO accepted connection, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43170 Sep 22 23:22:21.092 TRCE incoming request, uri: /extent/99/files, method: GET, req_id: fd1881d2-5a5b-43ae-aff7-5d1b019d18d8, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43171 Sep 22 23:22:21.093 INFO request completed, latency_us: 223, response_code: 200, uri: /extent/99/files, method: GET, req_id: fd1881d2-5a5b-43ae-aff7-5d1b019d18d8, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43172 Sep 22 23:22:21.093 INFO eid:99 Found repair files: ["063", "063.db"]
43173 Sep 22 23:22:21.093 TRCE incoming request, uri: /newextent/99/data, method: GET, req_id: e51b3f22-3657-4d69-a52c-f88e0b77ad75, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43174 Sep 22 23:22:21.094 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/99/data, method: GET, req_id: e51b3f22-3657-4d69-a52c-f88e0b77ad75, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43175 Sep 22 23:22:21.099 TRCE incoming request, uri: /newextent/99/db, method: GET, req_id: 9f83f553-3592-41b1-be69-c50140905ff2, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43176 Sep 22 23:22:21.099 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/99/db, method: GET, req_id: 9f83f553-3592-41b1-be69-c50140905ff2, remote_addr: 127.0.0.1:47636, local_addr: 127.0.0.1:52864, task: repair
43177 Sep 22 23:22:21.100 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/063.copy" to "/tmp/downstairs-zrMnlo6G/00/000/063.replace"
43178 Sep 22 23:22:21.100 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43179 Sep 22 23:22:21.101 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/063.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43180 Sep 22 23:22:21.101 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/063"
43181 Sep 22 23:22:21.101 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/063.db"
43182 Sep 22 23:22:21.101 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43183 Sep 22 23:22:21.101 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/063.replace" to "/tmp/downstairs-zrMnlo6G/00/000/063.completed"
43184 Sep 22 23:22:21.102 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43185 Sep 22 23:22:21.102 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43186 Sep 22 23:22:21.102 DEBG [0] It's time to notify for 354
43187 Sep 22 23:22:21.102 INFO Completion from [0] id:354 status:true
43188 Sep 22 23:22:21.102 INFO [355/752] Repair commands completed
43189 Sep 22 23:22:21.102 INFO Pop front: ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }, state: ClientData([New, New, New]) }
43190 Sep 22 23:22:21.102 INFO Sent repair work, now wait for resp
43191 Sep 22 23:22:21.102 INFO [0] received reconcile message
43192 Sep 22 23:22:21.102 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }, state: ClientData([InProgress, New, New]) }, : downstairs
43193 Sep 22 23:22:21.102 INFO [0] client ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }
43194 Sep 22 23:22:21.102 INFO [1] received reconcile message
43195 Sep 22 23:22:21.102 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43196 Sep 22 23:22:21.102 INFO [1] client ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }
43197 Sep 22 23:22:21.102 INFO [2] received reconcile message
43198 Sep 22 23:22:21.102 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43199 Sep 22 23:22:21.102 INFO [2] client ExtentReopen { repair_id: ReconciliationId(355), extent_id: 99 }
43200 Sep 22 23:22:21.102 DEBG 355 Reopen extent 99
43201 Sep 22 23:22:21.103 DEBG 355 Reopen extent 99
43202 Sep 22 23:22:21.104 DEBG 355 Reopen extent 99
43203 Sep 22 23:22:21.104 DEBG [2] It's time to notify for 355
43204 Sep 22 23:22:21.104 INFO Completion from [2] id:355 status:true
43205 Sep 22 23:22:21.104 INFO [356/752] Repair commands completed
43206 Sep 22 23:22:21.104 INFO Pop front: ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43207 Sep 22 23:22:21.104 INFO Sent repair work, now wait for resp
43208 Sep 22 23:22:21.104 INFO [0] received reconcile message
43209 Sep 22 23:22:21.104 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43210 Sep 22 23:22:21.104 INFO [0] client ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43211 Sep 22 23:22:21.104 INFO [1] received reconcile message
43212 Sep 22 23:22:21.104 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43213 Sep 22 23:22:21.104 INFO [1] client ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43214 Sep 22 23:22:21.105 INFO [2] received reconcile message
43215 Sep 22 23:22:21.105 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43216 Sep 22 23:22:21.105 INFO [2] client ExtentFlush { repair_id: ReconciliationId(356), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43217 Sep 22 23:22:21.105 DEBG 356 Flush extent 12 with f:2 g:2
43218 Sep 22 23:22:21.105 DEBG Flush just extent 12 with f:2 and g:2
43219 Sep 22 23:22:21.105 DEBG [1] It's time to notify for 356
43220 Sep 22 23:22:21.105 INFO Completion from [1] id:356 status:true
43221 Sep 22 23:22:21.105 INFO [357/752] Repair commands completed
43222 Sep 22 23:22:21.105 INFO Pop front: ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }, state: ClientData([New, New, New]) }
43223 Sep 22 23:22:21.105 INFO Sent repair work, now wait for resp
43224 Sep 22 23:22:21.105 INFO [0] received reconcile message
43225 Sep 22 23:22:21.105 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }, state: ClientData([InProgress, New, New]) }, : downstairs
43226 Sep 22 23:22:21.105 INFO [0] client ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }
43227 Sep 22 23:22:21.105 INFO [1] received reconcile message
43228 Sep 22 23:22:21.105 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43229 Sep 22 23:22:21.105 INFO [1] client ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }
43230 Sep 22 23:22:21.105 INFO [2] received reconcile message
43231 Sep 22 23:22:21.105 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43232 Sep 22 23:22:21.105 INFO [2] client ExtentClose { repair_id: ReconciliationId(357), extent_id: 12 }
43233 Sep 22 23:22:21.105 DEBG 357 Close extent 12
43234 Sep 22 23:22:21.105 DEBG 357 Close extent 12
43235 Sep 22 23:22:21.106 DEBG 357 Close extent 12
43236 Sep 22 23:22:21.106 DEBG [2] It's time to notify for 357
43237 Sep 22 23:22:21.106 INFO Completion from [2] id:357 status:true
43238 Sep 22 23:22:21.106 INFO [358/752] Repair commands completed
43239 Sep 22 23:22:21.106 INFO Pop front: ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43240 Sep 22 23:22:21.106 INFO Sent repair work, now wait for resp
43241 Sep 22 23:22:21.106 INFO [0] received reconcile message
43242 Sep 22 23:22:21.106 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43243 Sep 22 23:22:21.106 INFO [0] client ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43244 Sep 22 23:22:21.106 INFO [0] Sending repair request ReconciliationId(358)
43245 Sep 22 23:22:21.106 INFO [1] received reconcile message
43246 Sep 22 23:22:21.106 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43247 Sep 22 23:22:21.107 INFO [1] client ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43248 Sep 22 23:22:21.107 INFO [1] No action required ReconciliationId(358)
43249 Sep 22 23:22:21.107 INFO [2] received reconcile message
43250 Sep 22 23:22:21.107 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43251 Sep 22 23:22:21.107 INFO [2] client ExtentRepair { repair_id: ReconciliationId(358), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43252 Sep 22 23:22:21.107 INFO [2] No action required ReconciliationId(358)
43253 Sep 22 23:22:21.107 DEBG 358 Repair extent 12 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43254 Sep 22 23:22:21.107 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/00C.copy"
43255 Sep 22 23:22:21.172 INFO accepted connection, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43256 Sep 22 23:22:21.173 TRCE incoming request, uri: /extent/12/files, method: GET, req_id: b3c02c20-cc84-46c0-9438-3e2db2f72443, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43257 Sep 22 23:22:21.173 INFO request completed, latency_us: 256, response_code: 200, uri: /extent/12/files, method: GET, req_id: b3c02c20-cc84-46c0-9438-3e2db2f72443, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43258 Sep 22 23:22:21.173 INFO eid:12 Found repair files: ["00C", "00C.db"]
43259 Sep 22 23:22:21.174 TRCE incoming request, uri: /newextent/12/data, method: GET, req_id: 6fe9f463-2599-48c2-9b27-82af677233e0, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43260 Sep 22 23:22:21.174 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/12/data, method: GET, req_id: 6fe9f463-2599-48c2-9b27-82af677233e0, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43261 Sep 22 23:22:21.179 TRCE incoming request, uri: /newextent/12/db, method: GET, req_id: 8e04cb31-5a20-4989-8458-bc37e9e1d510, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43262 Sep 22 23:22:21.179 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/12/db, method: GET, req_id: 8e04cb31-5a20-4989-8458-bc37e9e1d510, remote_addr: 127.0.0.1:64228, local_addr: 127.0.0.1:52864, task: repair
43263 Sep 22 23:22:21.180 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/00C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/00C.replace"
43264 Sep 22 23:22:21.180 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43265 Sep 22 23:22:21.182 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/00C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43266 Sep 22 23:22:21.182 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00C"
43267 Sep 22 23:22:21.182 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00C.db"
43268 Sep 22 23:22:21.182 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43269 Sep 22 23:22:21.182 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/00C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/00C.completed"
43270 Sep 22 23:22:21.182 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43271 Sep 22 23:22:21.182 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43272 Sep 22 23:22:21.182 DEBG [0] It's time to notify for 358
43273 Sep 22 23:22:21.182 INFO Completion from [0] id:358 status:true
43274 Sep 22 23:22:21.182 INFO [359/752] Repair commands completed
43275 Sep 22 23:22:21.182 INFO Pop front: ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }, state: ClientData([New, New, New]) }
43276 Sep 22 23:22:21.182 INFO Sent repair work, now wait for resp
43277 Sep 22 23:22:21.183 INFO [0] received reconcile message
43278 Sep 22 23:22:21.183 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }, state: ClientData([InProgress, New, New]) }, : downstairs
43279 Sep 22 23:22:21.183 INFO [0] client ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }
43280 Sep 22 23:22:21.183 INFO [1] received reconcile message
43281 Sep 22 23:22:21.183 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43282 Sep 22 23:22:21.183 INFO [1] client ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }
43283 Sep 22 23:22:21.183 INFO [2] received reconcile message
43284 Sep 22 23:22:21.183 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43285 Sep 22 23:22:21.183 INFO [2] client ExtentReopen { repair_id: ReconciliationId(359), extent_id: 12 }
43286 Sep 22 23:22:21.183 DEBG 359 Reopen extent 12
43287 Sep 22 23:22:21.184 DEBG 359 Reopen extent 12
43288 Sep 22 23:22:21.184 DEBG 359 Reopen extent 12
43289 Sep 22 23:22:21.185 DEBG [2] It's time to notify for 359
43290 Sep 22 23:22:21.185 INFO Completion from [2] id:359 status:true
43291 Sep 22 23:22:21.185 INFO [360/752] Repair commands completed
43292 Sep 22 23:22:21.185 INFO Pop front: ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43293 Sep 22 23:22:21.185 INFO Sent repair work, now wait for resp
43294 Sep 22 23:22:21.185 INFO [0] received reconcile message
43295 Sep 22 23:22:21.185 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43296 Sep 22 23:22:21.185 INFO [0] client ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43297 Sep 22 23:22:21.185 INFO [1] received reconcile message
43298 Sep 22 23:22:21.185 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43299 Sep 22 23:22:21.185 INFO [1] client ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43300 Sep 22 23:22:21.185 INFO [2] received reconcile message
43301 Sep 22 23:22:21.185 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43302 Sep 22 23:22:21.185 INFO [2] client ExtentFlush { repair_id: ReconciliationId(360), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43303 Sep 22 23:22:21.185 DEBG 360 Flush extent 34 with f:2 g:2
43304 Sep 22 23:22:21.185 DEBG Flush just extent 34 with f:2 and g:2
43305 Sep 22 23:22:21.185 DEBG [1] It's time to notify for 360
43306 Sep 22 23:22:21.185 INFO Completion from [1] id:360 status:true
43307 Sep 22 23:22:21.185 INFO [361/752] Repair commands completed
43308 Sep 22 23:22:21.185 INFO Pop front: ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }, state: ClientData([New, New, New]) }
43309 Sep 22 23:22:21.186 INFO Sent repair work, now wait for resp
43310 Sep 22 23:22:21.186 INFO [0] received reconcile message
43311 Sep 22 23:22:21.186 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }, state: ClientData([InProgress, New, New]) }, : downstairs
43312 Sep 22 23:22:21.186 INFO [0] client ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }
43313 Sep 22 23:22:21.186 INFO [1] received reconcile message
43314 Sep 22 23:22:21.186 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43315 Sep 22 23:22:21.186 INFO [1] client ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }
43316 Sep 22 23:22:21.186 INFO [2] received reconcile message
43317 Sep 22 23:22:21.186 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43318 Sep 22 23:22:21.186 INFO [2] client ExtentClose { repair_id: ReconciliationId(361), extent_id: 34 }
43319 Sep 22 23:22:21.186 DEBG 361 Close extent 34
43320 Sep 22 23:22:21.186 DEBG 361 Close extent 34
43321 Sep 22 23:22:21.186 DEBG 361 Close extent 34
43322 Sep 22 23:22:21.187 DEBG [2] It's time to notify for 361
43323 Sep 22 23:22:21.187 INFO Completion from [2] id:361 status:true
43324 Sep 22 23:22:21.187 INFO [362/752] Repair commands completed
43325 Sep 22 23:22:21.187 INFO Pop front: ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43326 Sep 22 23:22:21.187 INFO Sent repair work, now wait for resp
43327 Sep 22 23:22:21.187 INFO [0] received reconcile message
43328 Sep 22 23:22:21.187 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43329 Sep 22 23:22:21.187 INFO [0] client ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43330 Sep 22 23:22:21.187 INFO [0] Sending repair request ReconciliationId(362)
43331 Sep 22 23:22:21.187 INFO [1] received reconcile message
43332 Sep 22 23:22:21.187 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43333 Sep 22 23:22:21.187 INFO [1] client ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43334 Sep 22 23:22:21.187 INFO [1] No action required ReconciliationId(362)
43335 Sep 22 23:22:21.187 INFO [2] received reconcile message
43336 Sep 22 23:22:21.187 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43337 Sep 22 23:22:21.187 INFO [2] client ExtentRepair { repair_id: ReconciliationId(362), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43338 Sep 22 23:22:21.187 INFO [2] No action required ReconciliationId(362)
43339 Sep 22 23:22:21.187 DEBG 362 Repair extent 34 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43340 Sep 22 23:22:21.187 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/022.copy"
43341 Sep 22 23:22:21.236 INFO [lossy] skipping 1083
43342 Sep 22 23:22:21.236 INFO [lossy] skipping 1084
43343 Sep 22 23:22:21.242 DEBG Read :1083 deps:[JobId(1082)] res:true
43344 Sep 22 23:22:21.249 INFO accepted connection, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43345 Sep 22 23:22:21.250 TRCE incoming request, uri: /extent/34/files, method: GET, req_id: 58fdb95c-fe9a-4eb3-a77d-4c3394e80c0c, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43346 Sep 22 23:22:21.250 INFO request completed, latency_us: 223, response_code: 200, uri: /extent/34/files, method: GET, req_id: 58fdb95c-fe9a-4eb3-a77d-4c3394e80c0c, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43347 Sep 22 23:22:21.250 INFO eid:34 Found repair files: ["022", "022.db"]
43348 Sep 22 23:22:21.250 TRCE incoming request, uri: /newextent/34/data, method: GET, req_id: e0fa8d5c-95c4-41e9-a2cb-16171c25b105, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43349 Sep 22 23:22:21.251 INFO request completed, latency_us: 330, response_code: 200, uri: /newextent/34/data, method: GET, req_id: e0fa8d5c-95c4-41e9-a2cb-16171c25b105, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43350 Sep 22 23:22:21.256 TRCE incoming request, uri: /newextent/34/db, method: GET, req_id: 7122403c-e19c-4e1b-9ace-63fb2bc8a134, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43351 Sep 22 23:22:21.256 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/34/db, method: GET, req_id: 7122403c-e19c-4e1b-9ace-63fb2bc8a134, remote_addr: 127.0.0.1:62310, local_addr: 127.0.0.1:52864, task: repair
43352 Sep 22 23:22:21.257 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/022.copy" to "/tmp/downstairs-zrMnlo6G/00/000/022.replace"
43353 Sep 22 23:22:21.257 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43354 Sep 22 23:22:21.258 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/022.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43355 Sep 22 23:22:21.258 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/022"
43356 Sep 22 23:22:21.258 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/022.db"
43357 Sep 22 23:22:21.258 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43358 Sep 22 23:22:21.258 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/022.replace" to "/tmp/downstairs-zrMnlo6G/00/000/022.completed"
43359 Sep 22 23:22:21.258 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43360 Sep 22 23:22:21.259 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43361 Sep 22 23:22:21.259 DEBG [0] It's time to notify for 362
43362 Sep 22 23:22:21.259 INFO Completion from [0] id:362 status:true
43363 Sep 22 23:22:21.259 INFO [363/752] Repair commands completed
43364 Sep 22 23:22:21.259 INFO Pop front: ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }, state: ClientData([New, New, New]) }
43365 Sep 22 23:22:21.259 INFO Sent repair work, now wait for resp
43366 Sep 22 23:22:21.259 INFO [0] received reconcile message
43367 Sep 22 23:22:21.259 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }, state: ClientData([InProgress, New, New]) }, : downstairs
43368 Sep 22 23:22:21.259 INFO [0] client ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }
43369 Sep 22 23:22:21.259 INFO [1] received reconcile message
43370 Sep 22 23:22:21.259 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43371 Sep 22 23:22:21.259 INFO [1] client ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }
43372 Sep 22 23:22:21.259 INFO [2] received reconcile message
43373 Sep 22 23:22:21.259 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43374 Sep 22 23:22:21.259 INFO [2] client ExtentReopen { repair_id: ReconciliationId(363), extent_id: 34 }
43375 Sep 22 23:22:21.259 DEBG 363 Reopen extent 34
43376 Sep 22 23:22:21.260 DEBG 363 Reopen extent 34
43377 Sep 22 23:22:21.261 DEBG 363 Reopen extent 34
43378 Sep 22 23:22:21.261 DEBG [2] It's time to notify for 363
43379 Sep 22 23:22:21.261 INFO Completion from [2] id:363 status:true
43380 Sep 22 23:22:21.261 INFO [364/752] Repair commands completed
43381 Sep 22 23:22:21.261 INFO Pop front: ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43382 Sep 22 23:22:21.261 INFO Sent repair work, now wait for resp
43383 Sep 22 23:22:21.261 INFO [0] received reconcile message
43384 Sep 22 23:22:21.261 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43385 Sep 22 23:22:21.261 INFO [0] client ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43386 Sep 22 23:22:21.261 INFO [1] received reconcile message
43387 Sep 22 23:22:21.261 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43388 Sep 22 23:22:21.261 INFO [1] client ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43389 Sep 22 23:22:21.261 INFO [2] received reconcile message
43390 Sep 22 23:22:21.261 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43391 Sep 22 23:22:21.261 INFO [2] client ExtentFlush { repair_id: ReconciliationId(364), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43392 Sep 22 23:22:21.262 DEBG 364 Flush extent 83 with f:2 g:2
43393 Sep 22 23:22:21.262 DEBG Flush just extent 83 with f:2 and g:2
43394 Sep 22 23:22:21.262 DEBG [1] It's time to notify for 364
43395 Sep 22 23:22:21.262 INFO Completion from [1] id:364 status:true
43396 Sep 22 23:22:21.262 INFO [365/752] Repair commands completed
43397 Sep 22 23:22:21.262 INFO Pop front: ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }, state: ClientData([New, New, New]) }
43398 Sep 22 23:22:21.262 INFO Sent repair work, now wait for resp
43399 Sep 22 23:22:21.262 INFO [0] received reconcile message
43400 Sep 22 23:22:21.262 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }, state: ClientData([InProgress, New, New]) }, : downstairs
43401 Sep 22 23:22:21.262 INFO [0] client ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }
43402 Sep 22 23:22:21.262 INFO [1] received reconcile message
43403 Sep 22 23:22:21.262 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43404 Sep 22 23:22:21.262 INFO [1] client ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }
43405 Sep 22 23:22:21.262 INFO [2] received reconcile message
43406 Sep 22 23:22:21.262 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43407 Sep 22 23:22:21.262 INFO [2] client ExtentClose { repair_id: ReconciliationId(365), extent_id: 83 }
43408 Sep 22 23:22:21.262 DEBG 365 Close extent 83
43409 Sep 22 23:22:21.262 DEBG 365 Close extent 83
43410 Sep 22 23:22:21.263 DEBG 365 Close extent 83
43411 Sep 22 23:22:21.263 DEBG [2] It's time to notify for 365
43412 Sep 22 23:22:21.263 INFO Completion from [2] id:365 status:true
43413 Sep 22 23:22:21.263 INFO [366/752] Repair commands completed
43414 Sep 22 23:22:21.263 INFO Pop front: ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43415 Sep 22 23:22:21.263 INFO Sent repair work, now wait for resp
43416 Sep 22 23:22:21.263 INFO [0] received reconcile message
43417 Sep 22 23:22:21.263 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43418 Sep 22 23:22:21.263 INFO [0] client ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43419 Sep 22 23:22:21.263 INFO [0] Sending repair request ReconciliationId(366)
43420 Sep 22 23:22:21.263 INFO [1] received reconcile message
43421 Sep 22 23:22:21.263 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43422 Sep 22 23:22:21.263 INFO [1] client ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43423 Sep 22 23:22:21.263 INFO [1] No action required ReconciliationId(366)
43424 Sep 22 23:22:21.263 INFO [2] received reconcile message
43425 Sep 22 23:22:21.264 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43426 Sep 22 23:22:21.264 INFO [2] client ExtentRepair { repair_id: ReconciliationId(366), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43427 Sep 22 23:22:21.264 INFO [2] No action required ReconciliationId(366)
43428 Sep 22 23:22:21.264 DEBG 366 Repair extent 83 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43429 Sep 22 23:22:21.264 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/053.copy"
43430 Sep 22 23:22:21.265 INFO [lossy] skipping 1084
43431 Sep 22 23:22:21.265 DEBG Flush :1084 extent_limit None deps:[JobId(1083), JobId(1082)] res:true f:31 g:1
43432 Sep 22 23:22:21.265 INFO [lossy] sleeping 1 second
43433 Sep 22 23:22:21.328 INFO accepted connection, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43434 Sep 22 23:22:21.328 TRCE incoming request, uri: /extent/83/files, method: GET, req_id: 6bd567c5-c702-4ac8-92de-920bb4d3e083, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43435 Sep 22 23:22:21.328 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/83/files, method: GET, req_id: 6bd567c5-c702-4ac8-92de-920bb4d3e083, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43436 Sep 22 23:22:21.329 INFO eid:83 Found repair files: ["053", "053.db"]
43437 Sep 22 23:22:21.329 TRCE incoming request, uri: /newextent/83/data, method: GET, req_id: 952faa8d-c196-40a0-a71b-c104182df9eb, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43438 Sep 22 23:22:21.329 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/83/data, method: GET, req_id: 952faa8d-c196-40a0-a71b-c104182df9eb, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43439 Sep 22 23:22:21.334 TRCE incoming request, uri: /newextent/83/db, method: GET, req_id: d4e5ea97-9f3f-47c5-88ee-b92477f5fe51, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43440 Sep 22 23:22:21.334 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/83/db, method: GET, req_id: d4e5ea97-9f3f-47c5-88ee-b92477f5fe51, remote_addr: 127.0.0.1:45336, local_addr: 127.0.0.1:52864, task: repair
43441 Sep 22 23:22:21.335 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/053.copy" to "/tmp/downstairs-zrMnlo6G/00/000/053.replace"
43442 Sep 22 23:22:21.336 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43443 Sep 22 23:22:21.336 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/053.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43444 Sep 22 23:22:21.337 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/053"
43445 Sep 22 23:22:21.337 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/053.db"
43446 Sep 22 23:22:21.337 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43447 Sep 22 23:22:21.337 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/053.replace" to "/tmp/downstairs-zrMnlo6G/00/000/053.completed"
43448 Sep 22 23:22:21.337 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43449 Sep 22 23:22:21.337 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43450 Sep 22 23:22:21.337 DEBG [0] It's time to notify for 366
43451 Sep 22 23:22:21.337 INFO Completion from [0] id:366 status:true
43452 Sep 22 23:22:21.337 INFO [367/752] Repair commands completed
43453 Sep 22 23:22:21.337 INFO Pop front: ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }, state: ClientData([New, New, New]) }
43454 Sep 22 23:22:21.337 INFO Sent repair work, now wait for resp
43455 Sep 22 23:22:21.337 INFO [0] received reconcile message
43456 Sep 22 23:22:21.337 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }, state: ClientData([InProgress, New, New]) }, : downstairs
43457 Sep 22 23:22:21.337 INFO [0] client ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }
43458 Sep 22 23:22:21.337 INFO [1] received reconcile message
43459 Sep 22 23:22:21.337 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43460 Sep 22 23:22:21.337 INFO [1] client ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }
43461 Sep 22 23:22:21.337 INFO [2] received reconcile message
43462 Sep 22 23:22:21.337 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43463 Sep 22 23:22:21.337 INFO [2] client ExtentReopen { repair_id: ReconciliationId(367), extent_id: 83 }
43464 Sep 22 23:22:21.338 DEBG 367 Reopen extent 83
43465 Sep 22 23:22:21.338 DEBG 367 Reopen extent 83
43466 Sep 22 23:22:21.339 DEBG 367 Reopen extent 83
43467 Sep 22 23:22:21.339 DEBG [2] It's time to notify for 367
43468 Sep 22 23:22:21.339 INFO Completion from [2] id:367 status:true
43469 Sep 22 23:22:21.339 INFO [368/752] Repair commands completed
43470 Sep 22 23:22:21.339 INFO Pop front: ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43471 Sep 22 23:22:21.339 INFO Sent repair work, now wait for resp
43472 Sep 22 23:22:21.339 INFO [0] received reconcile message
43473 Sep 22 23:22:21.339 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43474 Sep 22 23:22:21.339 INFO [0] client ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43475 Sep 22 23:22:21.339 INFO [1] received reconcile message
43476 Sep 22 23:22:21.339 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43477 Sep 22 23:22:21.339 INFO [1] client ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43478 Sep 22 23:22:21.340 INFO [2] received reconcile message
43479 Sep 22 23:22:21.340 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43480 Sep 22 23:22:21.340 INFO [2] client ExtentFlush { repair_id: ReconciliationId(368), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43481 Sep 22 23:22:21.340 DEBG 368 Flush extent 90 with f:2 g:2
43482 Sep 22 23:22:21.340 DEBG Flush just extent 90 with f:2 and g:2
43483 Sep 22 23:22:21.340 DEBG [1] It's time to notify for 368
43484 Sep 22 23:22:21.340 INFO Completion from [1] id:368 status:true
43485 Sep 22 23:22:21.340 INFO [369/752] Repair commands completed
43486 Sep 22 23:22:21.340 INFO Pop front: ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }, state: ClientData([New, New, New]) }
43487 Sep 22 23:22:21.340 INFO Sent repair work, now wait for resp
43488 Sep 22 23:22:21.340 INFO [0] received reconcile message
43489 Sep 22 23:22:21.340 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }, state: ClientData([InProgress, New, New]) }, : downstairs
43490 Sep 22 23:22:21.340 INFO [0] client ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }
43491 Sep 22 23:22:21.340 INFO [1] received reconcile message
43492 Sep 22 23:22:21.340 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43493 Sep 22 23:22:21.340 INFO [1] client ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }
43494 Sep 22 23:22:21.340 INFO [2] received reconcile message
43495 Sep 22 23:22:21.340 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43496 Sep 22 23:22:21.340 INFO [2] client ExtentClose { repair_id: ReconciliationId(369), extent_id: 90 }
43497 Sep 22 23:22:21.340 DEBG 369 Close extent 90
43498 Sep 22 23:22:21.341 DEBG 369 Close extent 90
43499 Sep 22 23:22:21.341 DEBG 369 Close extent 90
43500 Sep 22 23:22:21.341 DEBG [2] It's time to notify for 369
43501 Sep 22 23:22:21.341 INFO Completion from [2] id:369 status:true
43502 Sep 22 23:22:21.341 INFO [370/752] Repair commands completed
43503 Sep 22 23:22:21.341 INFO Pop front: ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43504 Sep 22 23:22:21.341 INFO Sent repair work, now wait for resp
43505 Sep 22 23:22:21.341 INFO [0] received reconcile message
43506 Sep 22 23:22:21.341 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43507 Sep 22 23:22:21.341 INFO [0] client ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43508 Sep 22 23:22:21.341 INFO [0] Sending repair request ReconciliationId(370)
43509 Sep 22 23:22:21.342 INFO [1] received reconcile message
43510 Sep 22 23:22:21.342 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43511 Sep 22 23:22:21.342 INFO [1] client ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43512 Sep 22 23:22:21.342 INFO [1] No action required ReconciliationId(370)
43513 Sep 22 23:22:21.342 INFO [2] received reconcile message
43514 Sep 22 23:22:21.342 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43515 Sep 22 23:22:21.342 INFO [2] client ExtentRepair { repair_id: ReconciliationId(370), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43516 Sep 22 23:22:21.342 INFO [2] No action required ReconciliationId(370)
43517 Sep 22 23:22:21.342 DEBG 370 Repair extent 90 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43518 Sep 22 23:22:21.342 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/05A.copy"
43519 Sep 22 23:22:21.407 INFO accepted connection, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43520 Sep 22 23:22:21.407 TRCE incoming request, uri: /extent/90/files, method: GET, req_id: b6cffec7-a064-4229-a79f-0a6741a1bbc8, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43521 Sep 22 23:22:21.407 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/90/files, method: GET, req_id: b6cffec7-a064-4229-a79f-0a6741a1bbc8, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43522 Sep 22 23:22:21.408 INFO eid:90 Found repair files: ["05A", "05A.db"]
43523 Sep 22 23:22:21.408 TRCE incoming request, uri: /newextent/90/data, method: GET, req_id: 257516e6-db76-479d-8b49-cf5b481ff065, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43524 Sep 22 23:22:21.408 INFO request completed, latency_us: 323, response_code: 200, uri: /newextent/90/data, method: GET, req_id: 257516e6-db76-479d-8b49-cf5b481ff065, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43525 Sep 22 23:22:21.413 TRCE incoming request, uri: /newextent/90/db, method: GET, req_id: 3dc928e5-3c28-4498-8fb7-88853fe93e45, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43526 Sep 22 23:22:21.413 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/90/db, method: GET, req_id: 3dc928e5-3c28-4498-8fb7-88853fe93e45, remote_addr: 127.0.0.1:39719, local_addr: 127.0.0.1:52864, task: repair
43527 Sep 22 23:22:21.414 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/05A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/05A.replace"
43528 Sep 22 23:22:21.414 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43529 Sep 22 23:22:21.415 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/05A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43530 Sep 22 23:22:21.415 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05A"
43531 Sep 22 23:22:21.416 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05A.db"
43532 Sep 22 23:22:21.416 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43533 Sep 22 23:22:21.416 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/05A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/05A.completed"
43534 Sep 22 23:22:21.416 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43535 Sep 22 23:22:21.416 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43536 Sep 22 23:22:21.416 DEBG [0] It's time to notify for 370
43537 Sep 22 23:22:21.416 INFO Completion from [0] id:370 status:true
43538 Sep 22 23:22:21.416 INFO [371/752] Repair commands completed
43539 Sep 22 23:22:21.416 INFO Pop front: ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }, state: ClientData([New, New, New]) }
43540 Sep 22 23:22:21.416 INFO Sent repair work, now wait for resp
43541 Sep 22 23:22:21.416 INFO [0] received reconcile message
43542 Sep 22 23:22:21.416 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }, state: ClientData([InProgress, New, New]) }, : downstairs
43543 Sep 22 23:22:21.416 INFO [0] client ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }
43544 Sep 22 23:22:21.416 INFO [1] received reconcile message
43545 Sep 22 23:22:21.416 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43546 Sep 22 23:22:21.416 INFO [1] client ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }
43547 Sep 22 23:22:21.416 INFO [2] received reconcile message
43548 Sep 22 23:22:21.416 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43549 Sep 22 23:22:21.416 INFO [2] client ExtentReopen { repair_id: ReconciliationId(371), extent_id: 90 }
43550 Sep 22 23:22:21.416 DEBG 371 Reopen extent 90
43551 Sep 22 23:22:21.417 DEBG 371 Reopen extent 90
43552 Sep 22 23:22:21.418 DEBG 371 Reopen extent 90
43553 Sep 22 23:22:21.418 DEBG [2] It's time to notify for 371
43554 Sep 22 23:22:21.418 INFO Completion from [2] id:371 status:true
43555 Sep 22 23:22:21.418 INFO [372/752] Repair commands completed
43556 Sep 22 23:22:21.418 INFO Pop front: ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43557 Sep 22 23:22:21.418 INFO Sent repair work, now wait for resp
43558 Sep 22 23:22:21.418 INFO [0] received reconcile message
43559 Sep 22 23:22:21.418 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43560 Sep 22 23:22:21.418 INFO [0] client ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43561 Sep 22 23:22:21.418 INFO [1] received reconcile message
43562 Sep 22 23:22:21.418 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43563 Sep 22 23:22:21.418 INFO [1] client ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43564 Sep 22 23:22:21.418 INFO [2] received reconcile message
43565 Sep 22 23:22:21.418 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43566 Sep 22 23:22:21.418 INFO [2] client ExtentFlush { repair_id: ReconciliationId(372), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43567 Sep 22 23:22:21.419 DEBG 372 Flush extent 66 with f:2 g:2
43568 Sep 22 23:22:21.419 DEBG Flush just extent 66 with f:2 and g:2
43569 Sep 22 23:22:21.419 DEBG [1] It's time to notify for 372
43570 Sep 22 23:22:21.419 INFO Completion from [1] id:372 status:true
43571 Sep 22 23:22:21.419 INFO [373/752] Repair commands completed
43572 Sep 22 23:22:21.419 INFO Pop front: ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }, state: ClientData([New, New, New]) }
43573 Sep 22 23:22:21.419 INFO Sent repair work, now wait for resp
43574 Sep 22 23:22:21.419 INFO [0] received reconcile message
43575 Sep 22 23:22:21.419 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }, state: ClientData([InProgress, New, New]) }, : downstairs
43576 Sep 22 23:22:21.419 INFO [0] client ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }
43577 Sep 22 23:22:21.419 INFO [1] received reconcile message
43578 Sep 22 23:22:21.419 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43579 Sep 22 23:22:21.419 INFO [1] client ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }
43580 Sep 22 23:22:21.419 INFO [2] received reconcile message
43581 Sep 22 23:22:21.419 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43582 Sep 22 23:22:21.419 INFO [2] client ExtentClose { repair_id: ReconciliationId(373), extent_id: 66 }
43583 Sep 22 23:22:21.419 DEBG 373 Close extent 66
43584 Sep 22 23:22:21.419 DEBG 373 Close extent 66
43585 Sep 22 23:22:21.420 DEBG 373 Close extent 66
43586 Sep 22 23:22:21.420 DEBG [2] It's time to notify for 373
43587 Sep 22 23:22:21.420 INFO Completion from [2] id:373 status:true
43588 Sep 22 23:22:21.420 INFO [374/752] Repair commands completed
43589 Sep 22 23:22:21.420 INFO Pop front: ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43590 Sep 22 23:22:21.420 INFO Sent repair work, now wait for resp
43591 Sep 22 23:22:21.420 INFO [0] received reconcile message
43592 Sep 22 23:22:21.420 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43593 Sep 22 23:22:21.420 INFO [0] client ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43594 Sep 22 23:22:21.420 INFO [0] Sending repair request ReconciliationId(374)
43595 Sep 22 23:22:21.420 INFO [1] received reconcile message
43596 Sep 22 23:22:21.420 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43597 Sep 22 23:22:21.420 INFO [1] client ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43598 Sep 22 23:22:21.420 INFO [1] No action required ReconciliationId(374)
43599 Sep 22 23:22:21.421 INFO [2] received reconcile message
43600 Sep 22 23:22:21.421 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43601 Sep 22 23:22:21.421 INFO [2] client ExtentRepair { repair_id: ReconciliationId(374), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43602 Sep 22 23:22:21.421 INFO [2] No action required ReconciliationId(374)
43603 Sep 22 23:22:21.421 DEBG 374 Repair extent 66 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43604 Sep 22 23:22:21.421 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/042.copy"
43605 Sep 22 23:22:21.484 INFO accepted connection, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43606 Sep 22 23:22:21.485 TRCE incoming request, uri: /extent/66/files, method: GET, req_id: ba3e1e6d-a638-494f-8f68-929d0d0b6c4b, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43607 Sep 22 23:22:21.485 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/66/files, method: GET, req_id: ba3e1e6d-a638-494f-8f68-929d0d0b6c4b, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43608 Sep 22 23:22:21.485 INFO eid:66 Found repair files: ["042", "042.db"]
43609 Sep 22 23:22:21.485 TRCE incoming request, uri: /newextent/66/data, method: GET, req_id: 631ac310-2844-403e-bf4c-d141a93d8a8f, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43610 Sep 22 23:22:21.486 INFO request completed, latency_us: 317, response_code: 200, uri: /newextent/66/data, method: GET, req_id: 631ac310-2844-403e-bf4c-d141a93d8a8f, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43611 Sep 22 23:22:21.490 TRCE incoming request, uri: /newextent/66/db, method: GET, req_id: e36ffdaa-88ba-4641-be62-75003b4668b8, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43612 Sep 22 23:22:21.491 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/66/db, method: GET, req_id: e36ffdaa-88ba-4641-be62-75003b4668b8, remote_addr: 127.0.0.1:60013, local_addr: 127.0.0.1:52864, task: repair
43613 Sep 22 23:22:21.492 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/042.copy" to "/tmp/downstairs-zrMnlo6G/00/000/042.replace"
43614 Sep 22 23:22:21.492 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43615 Sep 22 23:22:21.493 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/042.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43616 Sep 22 23:22:21.493 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/042"
43617 Sep 22 23:22:21.493 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/042.db"
43618 Sep 22 23:22:21.493 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43619 Sep 22 23:22:21.493 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/042.replace" to "/tmp/downstairs-zrMnlo6G/00/000/042.completed"
43620 Sep 22 23:22:21.493 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43621 Sep 22 23:22:21.493 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43622 Sep 22 23:22:21.493 DEBG [0] It's time to notify for 374
43623 Sep 22 23:22:21.493 INFO Completion from [0] id:374 status:true
43624 Sep 22 23:22:21.494 INFO [375/752] Repair commands completed
43625 Sep 22 23:22:21.494 INFO Pop front: ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }, state: ClientData([New, New, New]) }
43626 Sep 22 23:22:21.494 INFO Sent repair work, now wait for resp
43627 Sep 22 23:22:21.494 INFO [0] received reconcile message
43628 Sep 22 23:22:21.494 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }, state: ClientData([InProgress, New, New]) }, : downstairs
43629 Sep 22 23:22:21.494 INFO [0] client ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }
43630 Sep 22 23:22:21.494 INFO [1] received reconcile message
43631 Sep 22 23:22:21.494 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43632 Sep 22 23:22:21.494 INFO [1] client ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }
43633 Sep 22 23:22:21.494 INFO [2] received reconcile message
43634 Sep 22 23:22:21.494 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43635 Sep 22 23:22:21.494 INFO [2] client ExtentReopen { repair_id: ReconciliationId(375), extent_id: 66 }
43636 Sep 22 23:22:21.494 DEBG 375 Reopen extent 66
43637 Sep 22 23:22:21.494 DEBG 375 Reopen extent 66
43638 Sep 22 23:22:21.495 DEBG 375 Reopen extent 66
43639 Sep 22 23:22:21.496 DEBG [2] It's time to notify for 375
43640 Sep 22 23:22:21.496 INFO Completion from [2] id:375 status:true
43641 Sep 22 23:22:21.496 INFO [376/752] Repair commands completed
43642 Sep 22 23:22:21.496 INFO Pop front: ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43643 Sep 22 23:22:21.496 INFO Sent repair work, now wait for resp
43644 Sep 22 23:22:21.496 INFO [0] received reconcile message
43645 Sep 22 23:22:21.496 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43646 Sep 22 23:22:21.496 INFO [0] client ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43647 Sep 22 23:22:21.496 INFO [1] received reconcile message
43648 Sep 22 23:22:21.496 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43649 Sep 22 23:22:21.496 INFO [1] client ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43650 Sep 22 23:22:21.496 INFO [2] received reconcile message
43651 Sep 22 23:22:21.496 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43652 Sep 22 23:22:21.496 INFO [2] client ExtentFlush { repair_id: ReconciliationId(376), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43653 Sep 22 23:22:21.496 DEBG 376 Flush extent 87 with f:2 g:2
43654 Sep 22 23:22:21.496 DEBG Flush just extent 87 with f:2 and g:2
43655 Sep 22 23:22:21.496 DEBG [1] It's time to notify for 376
43656 Sep 22 23:22:21.496 INFO Completion from [1] id:376 status:true
43657 Sep 22 23:22:21.496 INFO [377/752] Repair commands completed
43658 Sep 22 23:22:21.496 INFO Pop front: ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }, state: ClientData([New, New, New]) }
43659 Sep 22 23:22:21.496 INFO Sent repair work, now wait for resp
43660 Sep 22 23:22:21.496 INFO [0] received reconcile message
43661 Sep 22 23:22:21.496 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }, state: ClientData([InProgress, New, New]) }, : downstairs
43662 Sep 22 23:22:21.496 INFO [0] client ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }
43663 Sep 22 23:22:21.496 INFO [1] received reconcile message
43664 Sep 22 23:22:21.496 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43665 Sep 22 23:22:21.496 INFO [1] client ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }
43666 Sep 22 23:22:21.497 INFO [2] received reconcile message
43667 Sep 22 23:22:21.497 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43668 Sep 22 23:22:21.497 INFO [2] client ExtentClose { repair_id: ReconciliationId(377), extent_id: 87 }
43669 Sep 22 23:22:21.497 DEBG 377 Close extent 87
43670 Sep 22 23:22:21.497 DEBG 377 Close extent 87
43671 Sep 22 23:22:21.497 DEBG 377 Close extent 87
43672 Sep 22 23:22:21.498 DEBG [2] It's time to notify for 377
43673 Sep 22 23:22:21.498 INFO Completion from [2] id:377 status:true
43674 Sep 22 23:22:21.498 INFO [378/752] Repair commands completed
43675 Sep 22 23:22:21.498 INFO Pop front: ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43676 Sep 22 23:22:21.498 INFO Sent repair work, now wait for resp
43677 Sep 22 23:22:21.498 INFO [0] received reconcile message
43678 Sep 22 23:22:21.498 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43679 Sep 22 23:22:21.498 INFO [0] client ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43680 Sep 22 23:22:21.498 INFO [0] Sending repair request ReconciliationId(378)
43681 Sep 22 23:22:21.498 INFO [1] received reconcile message
43682 Sep 22 23:22:21.498 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43683 Sep 22 23:22:21.498 INFO [1] client ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43684 Sep 22 23:22:21.498 INFO [1] No action required ReconciliationId(378)
43685 Sep 22 23:22:21.498 INFO [2] received reconcile message
43686 Sep 22 23:22:21.498 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43687 Sep 22 23:22:21.498 INFO [2] client ExtentRepair { repair_id: ReconciliationId(378), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43688 Sep 22 23:22:21.498 INFO [2] No action required ReconciliationId(378)
43689 Sep 22 23:22:21.498 DEBG 378 Repair extent 87 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43690 Sep 22 23:22:21.498 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/057.copy"
43691 Sep 22 23:22:21.563 INFO accepted connection, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43692 Sep 22 23:22:21.563 TRCE incoming request, uri: /extent/87/files, method: GET, req_id: 798788a8-e856-46a4-ac2d-625351a34e85, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43693 Sep 22 23:22:21.564 INFO request completed, latency_us: 194, response_code: 200, uri: /extent/87/files, method: GET, req_id: 798788a8-e856-46a4-ac2d-625351a34e85, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43694 Sep 22 23:22:21.564 INFO eid:87 Found repair files: ["057", "057.db"]
43695 Sep 22 23:22:21.564 TRCE incoming request, uri: /newextent/87/data, method: GET, req_id: 8f8ed8c8-a65e-413a-8d94-d772584e4e4b, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43696 Sep 22 23:22:21.565 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/87/data, method: GET, req_id: 8f8ed8c8-a65e-413a-8d94-d772584e4e4b, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43697 Sep 22 23:22:21.569 TRCE incoming request, uri: /newextent/87/db, method: GET, req_id: 17351486-c099-4e22-a231-27f8e59fa155, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43698 Sep 22 23:22:21.569 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/87/db, method: GET, req_id: 17351486-c099-4e22-a231-27f8e59fa155, remote_addr: 127.0.0.1:47158, local_addr: 127.0.0.1:52864, task: repair
43699 Sep 22 23:22:21.571 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/057.copy" to "/tmp/downstairs-zrMnlo6G/00/000/057.replace"
43700 Sep 22 23:22:21.571 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43701 Sep 22 23:22:21.571 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/057.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43702 Sep 22 23:22:21.572 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/057"
43703 Sep 22 23:22:21.572 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/057.db"
43704 Sep 22 23:22:21.572 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43705 Sep 22 23:22:21.572 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/057.replace" to "/tmp/downstairs-zrMnlo6G/00/000/057.completed"
43706 Sep 22 23:22:21.572 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43707 Sep 22 23:22:21.572 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43708 Sep 22 23:22:21.572 DEBG [0] It's time to notify for 378
43709 Sep 22 23:22:21.572 INFO Completion from [0] id:378 status:true
43710 Sep 22 23:22:21.572 INFO [379/752] Repair commands completed
43711 Sep 22 23:22:21.572 INFO Pop front: ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }, state: ClientData([New, New, New]) }
43712 Sep 22 23:22:21.572 INFO Sent repair work, now wait for resp
43713 Sep 22 23:22:21.572 INFO [0] received reconcile message
43714 Sep 22 23:22:21.572 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }, state: ClientData([InProgress, New, New]) }, : downstairs
43715 Sep 22 23:22:21.572 INFO [0] client ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }
43716 Sep 22 23:22:21.572 INFO [1] received reconcile message
43717 Sep 22 23:22:21.572 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43718 Sep 22 23:22:21.572 INFO [1] client ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }
43719 Sep 22 23:22:21.572 INFO [2] received reconcile message
43720 Sep 22 23:22:21.572 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43721 Sep 22 23:22:21.572 INFO [2] client ExtentReopen { repair_id: ReconciliationId(379), extent_id: 87 }
43722 Sep 22 23:22:21.573 DEBG 379 Reopen extent 87
43723 Sep 22 23:22:21.573 DEBG 379 Reopen extent 87
43724 Sep 22 23:22:21.574 DEBG 379 Reopen extent 87
43725 Sep 22 23:22:21.574 DEBG [2] It's time to notify for 379
43726 Sep 22 23:22:21.574 INFO Completion from [2] id:379 status:true
43727 Sep 22 23:22:21.574 INFO [380/752] Repair commands completed
43728 Sep 22 23:22:21.574 INFO Pop front: ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43729 Sep 22 23:22:21.574 INFO Sent repair work, now wait for resp
43730 Sep 22 23:22:21.574 INFO [0] received reconcile message
43731 Sep 22 23:22:21.574 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43732 Sep 22 23:22:21.574 INFO [0] client ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43733 Sep 22 23:22:21.574 INFO [1] received reconcile message
43734 Sep 22 23:22:21.575 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43735 Sep 22 23:22:21.575 INFO [1] client ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43736 Sep 22 23:22:21.575 INFO [2] received reconcile message
43737 Sep 22 23:22:21.575 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43738 Sep 22 23:22:21.575 INFO [2] client ExtentFlush { repair_id: ReconciliationId(380), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43739 Sep 22 23:22:21.575 DEBG 380 Flush extent 139 with f:2 g:2
43740 Sep 22 23:22:21.575 DEBG Flush just extent 139 with f:2 and g:2
43741 Sep 22 23:22:21.575 DEBG [1] It's time to notify for 380
43742 Sep 22 23:22:21.575 INFO Completion from [1] id:380 status:true
43743 Sep 22 23:22:21.575 INFO [381/752] Repair commands completed
43744 Sep 22 23:22:21.575 INFO Pop front: ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }, state: ClientData([New, New, New]) }
43745 Sep 22 23:22:21.575 INFO Sent repair work, now wait for resp
43746 Sep 22 23:22:21.575 INFO [0] received reconcile message
43747 Sep 22 23:22:21.575 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }, state: ClientData([InProgress, New, New]) }, : downstairs
43748 Sep 22 23:22:21.575 INFO [0] client ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }
43749 Sep 22 23:22:21.575 INFO [1] received reconcile message
43750 Sep 22 23:22:21.575 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43751 Sep 22 23:22:21.575 INFO [1] client ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }
43752 Sep 22 23:22:21.575 INFO [2] received reconcile message
43753 Sep 22 23:22:21.575 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43754 Sep 22 23:22:21.575 INFO [2] client ExtentClose { repair_id: ReconciliationId(381), extent_id: 139 }
43755 Sep 22 23:22:21.575 DEBG 381 Close extent 139
43756 Sep 22 23:22:21.576 DEBG 381 Close extent 139
43757 Sep 22 23:22:21.576 DEBG 381 Close extent 139
43758 Sep 22 23:22:21.576 DEBG [2] It's time to notify for 381
43759 Sep 22 23:22:21.576 INFO Completion from [2] id:381 status:true
43760 Sep 22 23:22:21.576 INFO [382/752] Repair commands completed
43761 Sep 22 23:22:21.576 INFO Pop front: ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43762 Sep 22 23:22:21.576 INFO Sent repair work, now wait for resp
43763 Sep 22 23:22:21.576 INFO [0] received reconcile message
43764 Sep 22 23:22:21.576 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43765 Sep 22 23:22:21.576 INFO [0] client ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43766 Sep 22 23:22:21.576 INFO [0] Sending repair request ReconciliationId(382)
43767 Sep 22 23:22:21.577 INFO [1] received reconcile message
43768 Sep 22 23:22:21.577 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43769 Sep 22 23:22:21.577 INFO [1] client ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43770 Sep 22 23:22:21.577 INFO [1] No action required ReconciliationId(382)
43771 Sep 22 23:22:21.577 INFO [2] received reconcile message
43772 Sep 22 23:22:21.577 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43773 Sep 22 23:22:21.577 INFO [2] client ExtentRepair { repair_id: ReconciliationId(382), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43774 Sep 22 23:22:21.577 INFO [2] No action required ReconciliationId(382)
43775 Sep 22 23:22:21.577 DEBG 382 Repair extent 139 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43776 Sep 22 23:22:21.577 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/08B.copy"
43777 Sep 22 23:22:21.641 INFO accepted connection, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43778 Sep 22 23:22:21.641 DEBG [1] Read AckReady 1083, : downstairs
43779 Sep 22 23:22:21.641 TRCE incoming request, uri: /extent/139/files, method: GET, req_id: 5845da20-bc49-4ff0-b9d8-91c777237045, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43780 Sep 22 23:22:21.641 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/139/files, method: GET, req_id: 5845da20-bc49-4ff0-b9d8-91c777237045, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43781 Sep 22 23:22:21.642 INFO eid:139 Found repair files: ["08B", "08B.db"]
43782 Sep 22 23:22:21.642 TRCE incoming request, uri: /newextent/139/data, method: GET, req_id: 824f179f-3cf6-4070-bcb5-5322ad003d5d, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43783 Sep 22 23:22:21.642 DEBG up_ds_listen was notified
43784 Sep 22 23:22:21.642 DEBG up_ds_listen process 1083
43785 Sep 22 23:22:21.642 DEBG [A] ack job 1083:84, : downstairs
43786 Sep 22 23:22:21.642 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/139/data, method: GET, req_id: 824f179f-3cf6-4070-bcb5-5322ad003d5d, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43787 Sep 22 23:22:21.647 TRCE incoming request, uri: /newextent/139/db, method: GET, req_id: 331fb13c-587f-4504-a055-5896edbe3bc5, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43788 Sep 22 23:22:21.647 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/139/db, method: GET, req_id: 331fb13c-587f-4504-a055-5896edbe3bc5, remote_addr: 127.0.0.1:35693, local_addr: 127.0.0.1:52864, task: repair
43789 Sep 22 23:22:21.648 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/08B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/08B.replace"
43790 Sep 22 23:22:21.648 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43791 Sep 22 23:22:21.649 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/08B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43792 Sep 22 23:22:21.649 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08B"
43793 Sep 22 23:22:21.650 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08B.db"
43794 Sep 22 23:22:21.650 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43795 Sep 22 23:22:21.650 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/08B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/08B.completed"
43796 Sep 22 23:22:21.650 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43797 Sep 22 23:22:21.650 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43798 Sep 22 23:22:21.650 DEBG [0] It's time to notify for 382
43799 Sep 22 23:22:21.650 INFO Completion from [0] id:382 status:true
43800 Sep 22 23:22:21.650 INFO [383/752] Repair commands completed
43801 Sep 22 23:22:21.650 INFO Pop front: ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }, state: ClientData([New, New, New]) }
43802 Sep 22 23:22:21.650 INFO Sent repair work, now wait for resp
43803 Sep 22 23:22:21.650 INFO [0] received reconcile message
43804 Sep 22 23:22:21.650 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }, state: ClientData([InProgress, New, New]) }, : downstairs
43805 Sep 22 23:22:21.650 INFO [0] client ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }
43806 Sep 22 23:22:21.650 INFO [1] received reconcile message
43807 Sep 22 23:22:21.650 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43808 Sep 22 23:22:21.650 INFO [1] client ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }
43809 Sep 22 23:22:21.650 INFO [2] received reconcile message
43810 Sep 22 23:22:21.650 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43811 Sep 22 23:22:21.650 INFO [2] client ExtentReopen { repair_id: ReconciliationId(383), extent_id: 139 }
43812 Sep 22 23:22:21.650 DEBG 383 Reopen extent 139
43813 Sep 22 23:22:21.651 DEBG 383 Reopen extent 139
43814 Sep 22 23:22:21.651 DEBG 383 Reopen extent 139
43815 Sep 22 23:22:21.652 DEBG [2] It's time to notify for 383
43816 Sep 22 23:22:21.652 INFO Completion from [2] id:383 status:true
43817 Sep 22 23:22:21.652 INFO [384/752] Repair commands completed
43818 Sep 22 23:22:21.652 INFO Pop front: ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43819 Sep 22 23:22:21.652 INFO Sent repair work, now wait for resp
43820 Sep 22 23:22:21.652 INFO [0] received reconcile message
43821 Sep 22 23:22:21.652 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43822 Sep 22 23:22:21.652 INFO [0] client ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43823 Sep 22 23:22:21.652 INFO [1] received reconcile message
43824 Sep 22 23:22:21.652 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43825 Sep 22 23:22:21.652 INFO [1] client ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43826 Sep 22 23:22:21.652 INFO [2] received reconcile message
43827 Sep 22 23:22:21.652 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43828 Sep 22 23:22:21.652 INFO [2] client ExtentFlush { repair_id: ReconciliationId(384), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43829 Sep 22 23:22:21.653 DEBG 384 Flush extent 26 with f:2 g:2
43830 Sep 22 23:22:21.653 DEBG Flush just extent 26 with f:2 and g:2
43831 Sep 22 23:22:21.653 DEBG [1] It's time to notify for 384
43832 Sep 22 23:22:21.653 INFO Completion from [1] id:384 status:true
43833 Sep 22 23:22:21.653 INFO [385/752] Repair commands completed
43834 Sep 22 23:22:21.653 INFO Pop front: ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }, state: ClientData([New, New, New]) }
43835 Sep 22 23:22:21.653 INFO Sent repair work, now wait for resp
43836 Sep 22 23:22:21.653 INFO [0] received reconcile message
43837 Sep 22 23:22:21.653 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }, state: ClientData([InProgress, New, New]) }, : downstairs
43838 Sep 22 23:22:21.653 INFO [0] client ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }
43839 Sep 22 23:22:21.653 INFO [1] received reconcile message
43840 Sep 22 23:22:21.653 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43841 Sep 22 23:22:21.653 INFO [1] client ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }
43842 Sep 22 23:22:21.653 INFO [2] received reconcile message
43843 Sep 22 23:22:21.653 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43844 Sep 22 23:22:21.653 INFO [2] client ExtentClose { repair_id: ReconciliationId(385), extent_id: 26 }
43845 Sep 22 23:22:21.653 DEBG 385 Close extent 26
43846 Sep 22 23:22:21.653 DEBG 385 Close extent 26
43847 Sep 22 23:22:21.654 DEBG 385 Close extent 26
43848 Sep 22 23:22:21.654 DEBG [2] It's time to notify for 385
43849 Sep 22 23:22:21.654 INFO Completion from [2] id:385 status:true
43850 Sep 22 23:22:21.654 INFO [386/752] Repair commands completed
43851 Sep 22 23:22:21.654 INFO Pop front: ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43852 Sep 22 23:22:21.654 INFO Sent repair work, now wait for resp
43853 Sep 22 23:22:21.654 INFO [0] received reconcile message
43854 Sep 22 23:22:21.654 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43855 Sep 22 23:22:21.654 INFO [0] client ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43856 Sep 22 23:22:21.654 INFO [0] Sending repair request ReconciliationId(386)
43857 Sep 22 23:22:21.654 INFO [1] received reconcile message
43858 Sep 22 23:22:21.654 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43859 Sep 22 23:22:21.654 INFO [1] client ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43860 Sep 22 23:22:21.654 INFO [1] No action required ReconciliationId(386)
43861 Sep 22 23:22:21.654 INFO [2] received reconcile message
43862 Sep 22 23:22:21.654 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43863 Sep 22 23:22:21.654 INFO [2] client ExtentRepair { repair_id: ReconciliationId(386), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43864 Sep 22 23:22:21.654 INFO [2] No action required ReconciliationId(386)
43865 Sep 22 23:22:21.655 DEBG 386 Repair extent 26 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43866 Sep 22 23:22:21.655 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/01A.copy"
43867 Sep 22 23:22:21.695 DEBG up_ds_listen checked 1 jobs, back to waiting
43868 Sep 22 23:22:21.702 DEBG Read :1083 deps:[JobId(1082)] res:true
43869 Sep 22 23:22:21.717 INFO accepted connection, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43870 Sep 22 23:22:21.717 TRCE incoming request, uri: /extent/26/files, method: GET, req_id: e7f8ec6c-c012-4ced-9835-4563c69666fc, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43871 Sep 22 23:22:21.718 INFO request completed, latency_us: 238, response_code: 200, uri: /extent/26/files, method: GET, req_id: e7f8ec6c-c012-4ced-9835-4563c69666fc, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43872 Sep 22 23:22:21.718 INFO eid:26 Found repair files: ["01A", "01A.db"]
43873 Sep 22 23:22:21.718 TRCE incoming request, uri: /newextent/26/data, method: GET, req_id: 3d9d7983-f6f3-451c-a32a-433c198e06b5, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43874 Sep 22 23:22:21.719 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/26/data, method: GET, req_id: 3d9d7983-f6f3-451c-a32a-433c198e06b5, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43875 Sep 22 23:22:21.723 TRCE incoming request, uri: /newextent/26/db, method: GET, req_id: 537fe346-99aa-4ce6-a2b6-f177755c4d6c, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43876 Sep 22 23:22:21.723 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/26/db, method: GET, req_id: 537fe346-99aa-4ce6-a2b6-f177755c4d6c, remote_addr: 127.0.0.1:47355, local_addr: 127.0.0.1:52864, task: repair
43877 Sep 22 23:22:21.724 DEBG IO Read 1085 has deps [JobId(1084)]
43878 Sep 22 23:22:21.725 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/01A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/01A.replace"
43879 Sep 22 23:22:21.725 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43880 Sep 22 23:22:21.726 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/01A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43881 Sep 22 23:22:21.726 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01A"
43882 Sep 22 23:22:21.726 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01A.db"
43883 Sep 22 23:22:21.726 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43884 Sep 22 23:22:21.726 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/01A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/01A.completed"
43885 Sep 22 23:22:21.726 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43886 Sep 22 23:22:21.726 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43887 Sep 22 23:22:21.726 DEBG [0] It's time to notify for 386
43888 Sep 22 23:22:21.726 INFO Completion from [0] id:386 status:true
43889 Sep 22 23:22:21.726 INFO [387/752] Repair commands completed
43890 Sep 22 23:22:21.726 INFO Pop front: ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }, state: ClientData([New, New, New]) }
43891 Sep 22 23:22:21.726 INFO Sent repair work, now wait for resp
43892 Sep 22 23:22:21.727 INFO [0] received reconcile message
43893 Sep 22 23:22:21.727 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }, state: ClientData([InProgress, New, New]) }, : downstairs
43894 Sep 22 23:22:21.727 INFO [0] client ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }
43895 Sep 22 23:22:21.727 INFO [1] received reconcile message
43896 Sep 22 23:22:21.727 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43897 Sep 22 23:22:21.727 INFO [1] client ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }
43898 Sep 22 23:22:21.727 INFO [2] received reconcile message
43899 Sep 22 23:22:21.727 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43900 Sep 22 23:22:21.727 INFO [2] client ExtentReopen { repair_id: ReconciliationId(387), extent_id: 26 }
43901 Sep 22 23:22:21.727 DEBG 387 Reopen extent 26
43902 Sep 22 23:22:21.728 DEBG 387 Reopen extent 26
43903 Sep 22 23:22:21.728 DEBG 387 Reopen extent 26
43904 Sep 22 23:22:21.729 DEBG [2] It's time to notify for 387
43905 Sep 22 23:22:21.729 INFO Completion from [2] id:387 status:true
43906 Sep 22 23:22:21.729 INFO [388/752] Repair commands completed
43907 Sep 22 23:22:21.729 INFO Pop front: ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43908 Sep 22 23:22:21.729 INFO Sent repair work, now wait for resp
43909 Sep 22 23:22:21.729 INFO [0] received reconcile message
43910 Sep 22 23:22:21.729 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43911 Sep 22 23:22:21.729 INFO [0] client ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43912 Sep 22 23:22:21.729 INFO [1] received reconcile message
43913 Sep 22 23:22:21.729 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43914 Sep 22 23:22:21.729 INFO [1] client ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43915 Sep 22 23:22:21.729 INFO [2] received reconcile message
43916 Sep 22 23:22:21.729 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43917 Sep 22 23:22:21.729 INFO [2] client ExtentFlush { repair_id: ReconciliationId(388), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43918 Sep 22 23:22:21.729 DEBG 388 Flush extent 84 with f:2 g:2
43919 Sep 22 23:22:21.729 DEBG Flush just extent 84 with f:2 and g:2
43920 Sep 22 23:22:21.729 DEBG [1] It's time to notify for 388
43921 Sep 22 23:22:21.729 INFO Completion from [1] id:388 status:true
43922 Sep 22 23:22:21.729 INFO [389/752] Repair commands completed
43923 Sep 22 23:22:21.729 INFO Pop front: ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }, state: ClientData([New, New, New]) }
43924 Sep 22 23:22:21.729 INFO Sent repair work, now wait for resp
43925 Sep 22 23:22:21.729 INFO [0] received reconcile message
43926 Sep 22 23:22:21.729 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }, state: ClientData([InProgress, New, New]) }, : downstairs
43927 Sep 22 23:22:21.729 INFO [0] client ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }
43928 Sep 22 23:22:21.730 INFO [1] received reconcile message
43929 Sep 22 23:22:21.730 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43930 Sep 22 23:22:21.730 INFO [1] client ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }
43931 Sep 22 23:22:21.730 INFO [2] received reconcile message
43932 Sep 22 23:22:21.730 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43933 Sep 22 23:22:21.730 INFO [2] client ExtentClose { repair_id: ReconciliationId(389), extent_id: 84 }
43934 Sep 22 23:22:21.730 DEBG 389 Close extent 84
43935 Sep 22 23:22:21.730 DEBG 389 Close extent 84
43936 Sep 22 23:22:21.730 DEBG 389 Close extent 84
43937 Sep 22 23:22:21.731 DEBG [2] It's time to notify for 389
43938 Sep 22 23:22:21.731 INFO Completion from [2] id:389 status:true
43939 Sep 22 23:22:21.731 INFO [390/752] Repair commands completed
43940 Sep 22 23:22:21.731 INFO Pop front: ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43941 Sep 22 23:22:21.731 INFO Sent repair work, now wait for resp
43942 Sep 22 23:22:21.731 INFO [0] received reconcile message
43943 Sep 22 23:22:21.731 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43944 Sep 22 23:22:21.731 INFO [0] client ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43945 Sep 22 23:22:21.731 INFO [0] Sending repair request ReconciliationId(390)
43946 Sep 22 23:22:21.731 INFO [1] received reconcile message
43947 Sep 22 23:22:21.731 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43948 Sep 22 23:22:21.731 INFO [1] client ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43949 Sep 22 23:22:21.731 INFO [1] No action required ReconciliationId(390)
43950 Sep 22 23:22:21.731 INFO [2] received reconcile message
43951 Sep 22 23:22:21.731 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43952 Sep 22 23:22:21.731 INFO [2] client ExtentRepair { repair_id: ReconciliationId(390), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
43953 Sep 22 23:22:21.731 INFO [2] No action required ReconciliationId(390)
43954 Sep 22 23:22:21.731 DEBG 390 Repair extent 84 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
43955 Sep 22 23:22:21.731 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/054.copy"
43956 Sep 22 23:22:21.737 INFO [lossy] skipping 1084
43957 Sep 22 23:22:21.737 INFO [lossy] skipping 1084
43958 Sep 22 23:22:21.737 DEBG Flush :1084 extent_limit None deps:[JobId(1083), JobId(1082)] res:true f:31 g:1
43959 Sep 22 23:22:21.743 DEBG Read :1085 deps:[JobId(1084)] res:true
43960 Sep 22 23:22:21.793 INFO accepted connection, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43961 Sep 22 23:22:21.793 TRCE incoming request, uri: /extent/84/files, method: GET, req_id: 5da15b25-742b-4fee-a6bc-2f22b72ff218, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43962 Sep 22 23:22:21.793 INFO request completed, latency_us: 207, response_code: 200, uri: /extent/84/files, method: GET, req_id: 5da15b25-742b-4fee-a6bc-2f22b72ff218, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43963 Sep 22 23:22:21.794 INFO eid:84 Found repair files: ["054", "054.db"]
43964 Sep 22 23:22:21.794 TRCE incoming request, uri: /newextent/84/data, method: GET, req_id: 138618f5-ba37-4436-9b12-f4c7fb1df2ce, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43965 Sep 22 23:22:21.794 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/84/data, method: GET, req_id: 138618f5-ba37-4436-9b12-f4c7fb1df2ce, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43966 Sep 22 23:22:21.799 TRCE incoming request, uri: /newextent/84/db, method: GET, req_id: 9bde8b18-754a-45f8-843e-b1c49c499d45, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43967 Sep 22 23:22:21.799 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/84/db, method: GET, req_id: 9bde8b18-754a-45f8-843e-b1c49c499d45, remote_addr: 127.0.0.1:49228, local_addr: 127.0.0.1:52864, task: repair
43968 Sep 22 23:22:21.801 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/054.copy" to "/tmp/downstairs-zrMnlo6G/00/000/054.replace"
43969 Sep 22 23:22:21.801 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43970 Sep 22 23:22:21.801 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/054.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
43971 Sep 22 23:22:21.802 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/054"
43972 Sep 22 23:22:21.802 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/054.db"
43973 Sep 22 23:22:21.802 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43974 Sep 22 23:22:21.802 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/054.replace" to "/tmp/downstairs-zrMnlo6G/00/000/054.completed"
43975 Sep 22 23:22:21.802 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43976 Sep 22 23:22:21.802 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
43977 Sep 22 23:22:21.802 DEBG [0] It's time to notify for 390
43978 Sep 22 23:22:21.802 INFO Completion from [0] id:390 status:true
43979 Sep 22 23:22:21.802 INFO [391/752] Repair commands completed
43980 Sep 22 23:22:21.802 INFO Pop front: ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }, state: ClientData([New, New, New]) }
43981 Sep 22 23:22:21.802 INFO Sent repair work, now wait for resp
43982 Sep 22 23:22:21.802 INFO [0] received reconcile message
43983 Sep 22 23:22:21.802 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }, state: ClientData([InProgress, New, New]) }, : downstairs
43984 Sep 22 23:22:21.802 INFO [0] client ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }
43985 Sep 22 23:22:21.802 INFO [1] received reconcile message
43986 Sep 22 23:22:21.802 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43987 Sep 22 23:22:21.802 INFO [1] client ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }
43988 Sep 22 23:22:21.802 INFO [2] received reconcile message
43989 Sep 22 23:22:21.802 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43990 Sep 22 23:22:21.802 INFO [2] client ExtentReopen { repair_id: ReconciliationId(391), extent_id: 84 }
43991 Sep 22 23:22:21.803 DEBG 391 Reopen extent 84
43992 Sep 22 23:22:21.803 DEBG 391 Reopen extent 84
43993 Sep 22 23:22:21.804 DEBG 391 Reopen extent 84
43994 Sep 22 23:22:21.804 DEBG [2] It's time to notify for 391
43995 Sep 22 23:22:21.804 INFO Completion from [2] id:391 status:true
43996 Sep 22 23:22:21.804 INFO [392/752] Repair commands completed
43997 Sep 22 23:22:21.804 INFO Pop front: ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43998 Sep 22 23:22:21.804 INFO Sent repair work, now wait for resp
43999 Sep 22 23:22:21.804 INFO [0] received reconcile message
44000 Sep 22 23:22:21.804 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44001 Sep 22 23:22:21.805 INFO [0] client ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44002 Sep 22 23:22:21.805 INFO [1] received reconcile message
44003 Sep 22 23:22:21.805 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44004 Sep 22 23:22:21.805 INFO [1] client ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44005 Sep 22 23:22:21.805 INFO [2] received reconcile message
44006 Sep 22 23:22:21.805 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44007 Sep 22 23:22:21.805 INFO [2] client ExtentFlush { repair_id: ReconciliationId(392), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44008 Sep 22 23:22:21.805 DEBG 392 Flush extent 56 with f:2 g:2
44009 Sep 22 23:22:21.805 DEBG Flush just extent 56 with f:2 and g:2
44010 Sep 22 23:22:21.805 DEBG [1] It's time to notify for 392
44011 Sep 22 23:22:21.805 INFO Completion from [1] id:392 status:true
44012 Sep 22 23:22:21.805 INFO [393/752] Repair commands completed
44013 Sep 22 23:22:21.805 INFO Pop front: ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }, state: ClientData([New, New, New]) }
44014 Sep 22 23:22:21.805 INFO Sent repair work, now wait for resp
44015 Sep 22 23:22:21.805 INFO [0] received reconcile message
44016 Sep 22 23:22:21.805 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }, state: ClientData([InProgress, New, New]) }, : downstairs
44017 Sep 22 23:22:21.805 INFO [0] client ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }
44018 Sep 22 23:22:21.805 INFO [1] received reconcile message
44019 Sep 22 23:22:21.805 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44020 Sep 22 23:22:21.805 INFO [1] client ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }
44021 Sep 22 23:22:21.805 INFO [2] received reconcile message
44022 Sep 22 23:22:21.805 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44023 Sep 22 23:22:21.805 INFO [2] client ExtentClose { repair_id: ReconciliationId(393), extent_id: 56 }
44024 Sep 22 23:22:21.805 DEBG 393 Close extent 56
44025 Sep 22 23:22:21.806 DEBG 393 Close extent 56
44026 Sep 22 23:22:21.806 DEBG 393 Close extent 56
44027 Sep 22 23:22:21.806 DEBG [2] It's time to notify for 393
44028 Sep 22 23:22:21.806 INFO Completion from [2] id:393 status:true
44029 Sep 22 23:22:21.806 INFO [394/752] Repair commands completed
44030 Sep 22 23:22:21.806 INFO Pop front: ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44031 Sep 22 23:22:21.806 INFO Sent repair work, now wait for resp
44032 Sep 22 23:22:21.806 INFO [0] received reconcile message
44033 Sep 22 23:22:21.806 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44034 Sep 22 23:22:21.807 INFO [0] client ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44035 Sep 22 23:22:21.807 INFO [0] Sending repair request ReconciliationId(394)
44036 Sep 22 23:22:21.807 INFO [1] received reconcile message
44037 Sep 22 23:22:21.807 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44038 Sep 22 23:22:21.807 INFO [1] client ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44039 Sep 22 23:22:21.807 INFO [1] No action required ReconciliationId(394)
44040 Sep 22 23:22:21.807 INFO [2] received reconcile message
44041 Sep 22 23:22:21.807 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44042 Sep 22 23:22:21.807 INFO [2] client ExtentRepair { repair_id: ReconciliationId(394), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44043 Sep 22 23:22:21.807 INFO [2] No action required ReconciliationId(394)
44044 Sep 22 23:22:21.807 DEBG 394 Repair extent 56 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44045 Sep 22 23:22:21.807 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/038.copy"
44046 Sep 22 23:22:21.871 INFO accepted connection, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44047 Sep 22 23:22:21.871 TRCE incoming request, uri: /extent/56/files, method: GET, req_id: f2404592-5a05-45ec-a98e-467c9ad6a4da, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44048 Sep 22 23:22:21.872 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/56/files, method: GET, req_id: f2404592-5a05-45ec-a98e-467c9ad6a4da, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44049 Sep 22 23:22:21.872 INFO eid:56 Found repair files: ["038", "038.db"]
44050 Sep 22 23:22:21.872 TRCE incoming request, uri: /newextent/56/data, method: GET, req_id: 693c5484-7cb0-4dba-8f1e-a06d51646f49, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44051 Sep 22 23:22:21.872 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/56/data, method: GET, req_id: 693c5484-7cb0-4dba-8f1e-a06d51646f49, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44052 Sep 22 23:22:21.877 TRCE incoming request, uri: /newextent/56/db, method: GET, req_id: 0eb03cd7-6f4e-489b-a85c-3fd6c946c7ea, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44053 Sep 22 23:22:21.878 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/56/db, method: GET, req_id: 0eb03cd7-6f4e-489b-a85c-3fd6c946c7ea, remote_addr: 127.0.0.1:51239, local_addr: 127.0.0.1:52864, task: repair
44054 Sep 22 23:22:21.879 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/038.copy" to "/tmp/downstairs-zrMnlo6G/00/000/038.replace"
44055 Sep 22 23:22:21.879 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44056 Sep 22 23:22:21.880 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/038.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44057 Sep 22 23:22:21.880 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/038"
44058 Sep 22 23:22:21.880 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/038.db"
44059 Sep 22 23:22:21.880 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44060 Sep 22 23:22:21.880 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/038.replace" to "/tmp/downstairs-zrMnlo6G/00/000/038.completed"
44061 Sep 22 23:22:21.880 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44062 Sep 22 23:22:21.880 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44063 Sep 22 23:22:21.880 DEBG [0] It's time to notify for 394
44064 Sep 22 23:22:21.881 INFO Completion from [0] id:394 status:true
44065 Sep 22 23:22:21.881 INFO [395/752] Repair commands completed
44066 Sep 22 23:22:21.881 INFO Pop front: ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }, state: ClientData([New, New, New]) }
44067 Sep 22 23:22:21.881 INFO Sent repair work, now wait for resp
44068 Sep 22 23:22:21.881 INFO [0] received reconcile message
44069 Sep 22 23:22:21.881 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }, state: ClientData([InProgress, New, New]) }, : downstairs
44070 Sep 22 23:22:21.881 INFO [0] client ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }
44071 Sep 22 23:22:21.881 INFO [1] received reconcile message
44072 Sep 22 23:22:21.881 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44073 Sep 22 23:22:21.881 INFO [1] client ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }
44074 Sep 22 23:22:21.881 INFO [2] received reconcile message
44075 Sep 22 23:22:21.881 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44076 Sep 22 23:22:21.881 INFO [2] client ExtentReopen { repair_id: ReconciliationId(395), extent_id: 56 }
44077 Sep 22 23:22:21.881 DEBG 395 Reopen extent 56
44078 Sep 22 23:22:21.882 DEBG 395 Reopen extent 56
44079 Sep 22 23:22:21.882 DEBG 395 Reopen extent 56
44080 Sep 22 23:22:21.883 DEBG [2] It's time to notify for 395
44081 Sep 22 23:22:21.883 INFO Completion from [2] id:395 status:true
44082 Sep 22 23:22:21.883 INFO [396/752] Repair commands completed
44083 Sep 22 23:22:21.883 INFO Pop front: ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44084 Sep 22 23:22:21.883 INFO Sent repair work, now wait for resp
44085 Sep 22 23:22:21.883 INFO [0] received reconcile message
44086 Sep 22 23:22:21.883 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44087 Sep 22 23:22:21.883 INFO [0] client ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44088 Sep 22 23:22:21.883 INFO [1] received reconcile message
44089 Sep 22 23:22:21.883 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44090 Sep 22 23:22:21.883 INFO [1] client ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44091 Sep 22 23:22:21.883 INFO [2] received reconcile message
44092 Sep 22 23:22:21.883 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44093 Sep 22 23:22:21.883 INFO [2] client ExtentFlush { repair_id: ReconciliationId(396), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44094 Sep 22 23:22:21.883 DEBG 396 Flush extent 94 with f:2 g:2
44095 Sep 22 23:22:21.883 DEBG Flush just extent 94 with f:2 and g:2
44096 Sep 22 23:22:21.883 DEBG [1] It's time to notify for 396
44097 Sep 22 23:22:21.883 INFO Completion from [1] id:396 status:true
44098 Sep 22 23:22:21.883 INFO [397/752] Repair commands completed
44099 Sep 22 23:22:21.883 INFO Pop front: ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }, state: ClientData([New, New, New]) }
44100 Sep 22 23:22:21.883 INFO Sent repair work, now wait for resp
44101 Sep 22 23:22:21.883 INFO [0] received reconcile message
44102 Sep 22 23:22:21.883 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }, state: ClientData([InProgress, New, New]) }, : downstairs
44103 Sep 22 23:22:21.883 INFO [0] client ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }
44104 Sep 22 23:22:21.883 INFO [1] received reconcile message
44105 Sep 22 23:22:21.884 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44106 Sep 22 23:22:21.884 INFO [1] client ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }
44107 Sep 22 23:22:21.884 INFO [2] received reconcile message
44108 Sep 22 23:22:21.884 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44109 Sep 22 23:22:21.884 INFO [2] client ExtentClose { repair_id: ReconciliationId(397), extent_id: 94 }
44110 Sep 22 23:22:21.884 DEBG 397 Close extent 94
44111 Sep 22 23:22:21.884 DEBG 397 Close extent 94
44112 Sep 22 23:22:21.884 DEBG 397 Close extent 94
44113 Sep 22 23:22:21.885 DEBG [2] It's time to notify for 397
44114 Sep 22 23:22:21.885 INFO Completion from [2] id:397 status:true
44115 Sep 22 23:22:21.885 INFO [398/752] Repair commands completed
44116 Sep 22 23:22:21.885 INFO Pop front: ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44117 Sep 22 23:22:21.885 INFO Sent repair work, now wait for resp
44118 Sep 22 23:22:21.885 INFO [0] received reconcile message
44119 Sep 22 23:22:21.885 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44120 Sep 22 23:22:21.885 INFO [0] client ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44121 Sep 22 23:22:21.885 INFO [0] Sending repair request ReconciliationId(398)
44122 Sep 22 23:22:21.885 INFO [1] received reconcile message
44123 Sep 22 23:22:21.885 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44124 Sep 22 23:22:21.885 INFO [1] client ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44125 Sep 22 23:22:21.885 INFO [1] No action required ReconciliationId(398)
44126 Sep 22 23:22:21.885 INFO [2] received reconcile message
44127 Sep 22 23:22:21.885 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44128 Sep 22 23:22:21.885 INFO [2] client ExtentRepair { repair_id: ReconciliationId(398), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44129 Sep 22 23:22:21.885 INFO [2] No action required ReconciliationId(398)
44130 Sep 22 23:22:21.885 DEBG 398 Repair extent 94 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44131 Sep 22 23:22:21.885 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/05E.copy"
44132 Sep 22 23:22:21.949 INFO accepted connection, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44133 Sep 22 23:22:21.950 TRCE incoming request, uri: /extent/94/files, method: GET, req_id: 5ae072b3-ef43-40db-aeca-9f3eabcf43f8, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44134 Sep 22 23:22:21.950 INFO request completed, latency_us: 236, response_code: 200, uri: /extent/94/files, method: GET, req_id: 5ae072b3-ef43-40db-aeca-9f3eabcf43f8, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44135 Sep 22 23:22:21.950 INFO eid:94 Found repair files: ["05E", "05E.db"]
44136 Sep 22 23:22:21.950 TRCE incoming request, uri: /newextent/94/data, method: GET, req_id: f30b7809-59e9-47ea-9ef3-03dac011f09c, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44137 Sep 22 23:22:21.951 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/94/data, method: GET, req_id: f30b7809-59e9-47ea-9ef3-03dac011f09c, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44138 Sep 22 23:22:21.956 TRCE incoming request, uri: /newextent/94/db, method: GET, req_id: 7cb4c93d-cb6e-43dc-9748-091cd235c884, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44139 Sep 22 23:22:21.956 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/94/db, method: GET, req_id: 7cb4c93d-cb6e-43dc-9748-091cd235c884, remote_addr: 127.0.0.1:32831, local_addr: 127.0.0.1:52864, task: repair
44140 Sep 22 23:22:21.957 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/05E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/05E.replace"
44141 Sep 22 23:22:21.957 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44142 Sep 22 23:22:21.958 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/05E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44143 Sep 22 23:22:21.958 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05E"
44144 Sep 22 23:22:21.958 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05E.db"
44145 Sep 22 23:22:21.958 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44146 Sep 22 23:22:21.958 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/05E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/05E.completed"
44147 Sep 22 23:22:21.959 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44148 Sep 22 23:22:21.959 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44149 Sep 22 23:22:21.959 DEBG [0] It's time to notify for 398
44150 Sep 22 23:22:21.959 INFO Completion from [0] id:398 status:true
44151 Sep 22 23:22:21.959 INFO [399/752] Repair commands completed
44152 Sep 22 23:22:21.959 INFO Pop front: ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }, state: ClientData([New, New, New]) }
44153 Sep 22 23:22:21.959 INFO Sent repair work, now wait for resp
44154 Sep 22 23:22:21.959 INFO [0] received reconcile message
44155 Sep 22 23:22:21.959 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }, state: ClientData([InProgress, New, New]) }, : downstairs
44156 Sep 22 23:22:21.959 INFO [0] client ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }
44157 Sep 22 23:22:21.959 INFO [1] received reconcile message
44158 Sep 22 23:22:21.959 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44159 Sep 22 23:22:21.959 INFO [1] client ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }
44160 Sep 22 23:22:21.959 INFO [2] received reconcile message
44161 Sep 22 23:22:21.959 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44162 Sep 22 23:22:21.959 INFO [2] client ExtentReopen { repair_id: ReconciliationId(399), extent_id: 94 }
44163 Sep 22 23:22:21.959 DEBG 399 Reopen extent 94
44164 Sep 22 23:22:21.960 DEBG 399 Reopen extent 94
44165 Sep 22 23:22:21.961 DEBG 399 Reopen extent 94
44166 Sep 22 23:22:21.961 DEBG [2] It's time to notify for 399
44167 Sep 22 23:22:21.961 INFO Completion from [2] id:399 status:true
44168 Sep 22 23:22:21.961 INFO [400/752] Repair commands completed
44169 Sep 22 23:22:21.961 INFO Pop front: ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44170 Sep 22 23:22:21.961 INFO Sent repair work, now wait for resp
44171 Sep 22 23:22:21.961 INFO [0] received reconcile message
44172 Sep 22 23:22:21.961 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44173 Sep 22 23:22:21.961 INFO [0] client ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44174 Sep 22 23:22:21.961 INFO [1] received reconcile message
44175 Sep 22 23:22:21.961 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44176 Sep 22 23:22:21.961 INFO [1] client ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44177 Sep 22 23:22:21.962 INFO [2] received reconcile message
44178 Sep 22 23:22:21.962 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44179 Sep 22 23:22:21.962 INFO [2] client ExtentFlush { repair_id: ReconciliationId(400), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44180 Sep 22 23:22:21.962 DEBG 400 Flush extent 93 with f:2 g:2
44181 Sep 22 23:22:21.962 DEBG Flush just extent 93 with f:2 and g:2
44182 Sep 22 23:22:21.962 DEBG [1] It's time to notify for 400
44183 Sep 22 23:22:21.962 INFO Completion from [1] id:400 status:true
44184 Sep 22 23:22:21.962 INFO [401/752] Repair commands completed
44185 Sep 22 23:22:21.962 INFO Pop front: ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }, state: ClientData([New, New, New]) }
44186 Sep 22 23:22:21.962 INFO Sent repair work, now wait for resp
44187 Sep 22 23:22:21.962 INFO [0] received reconcile message
44188 Sep 22 23:22:21.962 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }, state: ClientData([InProgress, New, New]) }, : downstairs
44189 Sep 22 23:22:21.962 INFO [0] client ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }
44190 Sep 22 23:22:21.962 INFO [1] received reconcile message
44191 Sep 22 23:22:21.962 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44192 Sep 22 23:22:21.962 INFO [1] client ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }
44193 Sep 22 23:22:21.962 INFO [2] received reconcile message
44194 Sep 22 23:22:21.962 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44195 Sep 22 23:22:21.962 INFO [2] client ExtentClose { repair_id: ReconciliationId(401), extent_id: 93 }
44196 Sep 22 23:22:21.962 DEBG 401 Close extent 93
44197 Sep 22 23:22:21.963 DEBG 401 Close extent 93
44198 Sep 22 23:22:21.963 DEBG 401 Close extent 93
44199 Sep 22 23:22:21.963 DEBG [2] It's time to notify for 401
44200 Sep 22 23:22:21.963 INFO Completion from [2] id:401 status:true
44201 Sep 22 23:22:21.963 INFO [402/752] Repair commands completed
44202 Sep 22 23:22:21.963 INFO Pop front: ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44203 Sep 22 23:22:21.963 INFO Sent repair work, now wait for resp
44204 Sep 22 23:22:21.963 INFO [0] received reconcile message
44205 Sep 22 23:22:21.963 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44206 Sep 22 23:22:21.963 INFO [0] client ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44207 Sep 22 23:22:21.963 INFO [0] Sending repair request ReconciliationId(402)
44208 Sep 22 23:22:21.964 INFO [1] received reconcile message
44209 Sep 22 23:22:21.964 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44210 Sep 22 23:22:21.964 INFO [1] client ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44211 Sep 22 23:22:21.964 INFO [1] No action required ReconciliationId(402)
44212 Sep 22 23:22:21.964 INFO [2] received reconcile message
44213 Sep 22 23:22:21.964 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44214 Sep 22 23:22:21.964 INFO [2] client ExtentRepair { repair_id: ReconciliationId(402), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44215 Sep 22 23:22:21.964 INFO [2] No action required ReconciliationId(402)
44216 Sep 22 23:22:21.964 DEBG 402 Repair extent 93 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44217 Sep 22 23:22:21.964 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/05D.copy"
44218 Sep 22 23:22:22.029 INFO accepted connection, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44219 Sep 22 23:22:22.029 TRCE incoming request, uri: /extent/93/files, method: GET, req_id: 92e54ccf-7732-462e-8b41-d50464e746f5, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44220 Sep 22 23:22:22.029 INFO request completed, latency_us: 264, response_code: 200, uri: /extent/93/files, method: GET, req_id: 92e54ccf-7732-462e-8b41-d50464e746f5, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44221 Sep 22 23:22:22.030 INFO eid:93 Found repair files: ["05D", "05D.db"]
44222 Sep 22 23:22:22.030 TRCE incoming request, uri: /newextent/93/data, method: GET, req_id: f4b98ba5-84e6-4edd-8da4-a7cf6828e059, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44223 Sep 22 23:22:22.030 INFO request completed, latency_us: 374, response_code: 200, uri: /newextent/93/data, method: GET, req_id: f4b98ba5-84e6-4edd-8da4-a7cf6828e059, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44224 Sep 22 23:22:22.035 TRCE incoming request, uri: /newextent/93/db, method: GET, req_id: 1d51a21c-3761-49ce-a8e6-e1fb9b24b5db, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44225 Sep 22 23:22:22.035 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/93/db, method: GET, req_id: 1d51a21c-3761-49ce-a8e6-e1fb9b24b5db, remote_addr: 127.0.0.1:39873, local_addr: 127.0.0.1:52864, task: repair
44226 Sep 22 23:22:22.036 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/05D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/05D.replace"
44227 Sep 22 23:22:22.036 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44228 Sep 22 23:22:22.037 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/05D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44229 Sep 22 23:22:22.038 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05D"
44230 Sep 22 23:22:22.038 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05D.db"
44231 Sep 22 23:22:22.038 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44232 Sep 22 23:22:22.038 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/05D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/05D.completed"
44233 Sep 22 23:22:22.038 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44234 Sep 22 23:22:22.038 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44235 Sep 22 23:22:22.038 DEBG [0] It's time to notify for 402
44236 Sep 22 23:22:22.038 INFO Completion from [0] id:402 status:true
44237 Sep 22 23:22:22.038 INFO [403/752] Repair commands completed
44238 Sep 22 23:22:22.038 INFO Pop front: ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }, state: ClientData([New, New, New]) }
44239 Sep 22 23:22:22.038 INFO Sent repair work, now wait for resp
44240 Sep 22 23:22:22.038 INFO [0] received reconcile message
44241 Sep 22 23:22:22.038 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }, state: ClientData([InProgress, New, New]) }, : downstairs
44242 Sep 22 23:22:22.038 INFO [0] client ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }
44243 Sep 22 23:22:22.039 INFO [1] received reconcile message
44244 Sep 22 23:22:22.039 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44245 Sep 22 23:22:22.039 INFO [1] client ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }
44246 Sep 22 23:22:22.039 INFO [2] received reconcile message
44247 Sep 22 23:22:22.039 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44248 Sep 22 23:22:22.039 INFO [2] client ExtentReopen { repair_id: ReconciliationId(403), extent_id: 93 }
44249 Sep 22 23:22:22.039 DEBG 403 Reopen extent 93
44250 Sep 22 23:22:22.040 DEBG 403 Reopen extent 93
44251 Sep 22 23:22:22.040 DEBG 403 Reopen extent 93
44252 Sep 22 23:22:22.041 DEBG [2] It's time to notify for 403
44253 Sep 22 23:22:22.041 INFO Completion from [2] id:403 status:true
44254 Sep 22 23:22:22.041 INFO [404/752] Repair commands completed
44255 Sep 22 23:22:22.041 INFO Pop front: ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44256 Sep 22 23:22:22.041 INFO Sent repair work, now wait for resp
44257 Sep 22 23:22:22.041 INFO [0] received reconcile message
44258 Sep 22 23:22:22.041 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44259 Sep 22 23:22:22.041 INFO [0] client ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44260 Sep 22 23:22:22.041 INFO [1] received reconcile message
44261 Sep 22 23:22:22.041 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44262 Sep 22 23:22:22.041 INFO [1] client ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44263 Sep 22 23:22:22.041 INFO [2] received reconcile message
44264 Sep 22 23:22:22.041 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44265 Sep 22 23:22:22.041 INFO [2] client ExtentFlush { repair_id: ReconciliationId(404), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44266 Sep 22 23:22:22.041 DEBG 404 Flush extent 44 with f:2 g:2
44267 Sep 22 23:22:22.041 DEBG Flush just extent 44 with f:2 and g:2
44268 Sep 22 23:22:22.041 DEBG [1] It's time to notify for 404
44269 Sep 22 23:22:22.041 INFO Completion from [1] id:404 status:true
44270 Sep 22 23:22:22.041 INFO [405/752] Repair commands completed
44271 Sep 22 23:22:22.041 INFO Pop front: ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }, state: ClientData([New, New, New]) }
44272 Sep 22 23:22:22.041 INFO Sent repair work, now wait for resp
44273 Sep 22 23:22:22.041 INFO [0] received reconcile message
44274 Sep 22 23:22:22.041 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }, state: ClientData([InProgress, New, New]) }, : downstairs
44275 Sep 22 23:22:22.041 INFO [0] client ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }
44276 Sep 22 23:22:22.042 INFO [1] received reconcile message
44277 Sep 22 23:22:22.042 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44278 Sep 22 23:22:22.042 INFO [1] client ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }
44279 Sep 22 23:22:22.042 INFO [2] received reconcile message
44280 Sep 22 23:22:22.042 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44281 Sep 22 23:22:22.042 INFO [2] client ExtentClose { repair_id: ReconciliationId(405), extent_id: 44 }
44282 Sep 22 23:22:22.042 DEBG 405 Close extent 44
44283 Sep 22 23:22:22.042 DEBG 405 Close extent 44
44284 Sep 22 23:22:22.042 DEBG 405 Close extent 44
44285 Sep 22 23:22:22.043 DEBG [2] It's time to notify for 405
44286 Sep 22 23:22:22.043 INFO Completion from [2] id:405 status:true
44287 Sep 22 23:22:22.043 INFO [406/752] Repair commands completed
44288 Sep 22 23:22:22.043 INFO Pop front: ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44289 Sep 22 23:22:22.043 INFO Sent repair work, now wait for resp
44290 Sep 22 23:22:22.043 INFO [0] received reconcile message
44291 Sep 22 23:22:22.043 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44292 Sep 22 23:22:22.043 INFO [0] client ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44293 Sep 22 23:22:22.043 INFO [0] Sending repair request ReconciliationId(406)
44294 Sep 22 23:22:22.043 INFO [1] received reconcile message
44295 Sep 22 23:22:22.043 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44296 Sep 22 23:22:22.043 INFO [1] client ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44297 Sep 22 23:22:22.043 INFO [1] No action required ReconciliationId(406)
44298 Sep 22 23:22:22.043 INFO [2] received reconcile message
44299 Sep 22 23:22:22.043 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44300 Sep 22 23:22:22.043 INFO [2] client ExtentRepair { repair_id: ReconciliationId(406), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44301 Sep 22 23:22:22.043 INFO [2] No action required ReconciliationId(406)
44302 Sep 22 23:22:22.043 DEBG 406 Repair extent 44 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44303 Sep 22 23:22:22.043 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/02C.copy"
44304 Sep 22 23:22:22.107 INFO accepted connection, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44305 Sep 22 23:22:22.107 TRCE incoming request, uri: /extent/44/files, method: GET, req_id: 54ed40bf-ac6b-448e-bf6a-07b93bc3eeb4, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44306 Sep 22 23:22:22.108 INFO request completed, latency_us: 282, response_code: 200, uri: /extent/44/files, method: GET, req_id: 54ed40bf-ac6b-448e-bf6a-07b93bc3eeb4, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44307 Sep 22 23:22:22.108 INFO eid:44 Found repair files: ["02C", "02C.db"]
44308 Sep 22 23:22:22.108 TRCE incoming request, uri: /newextent/44/data, method: GET, req_id: d7373006-990b-40ff-850c-14658e02a461, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44309 Sep 22 23:22:22.109 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/44/data, method: GET, req_id: d7373006-990b-40ff-850c-14658e02a461, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44310 Sep 22 23:22:22.113 TRCE incoming request, uri: /newextent/44/db, method: GET, req_id: 35fcbe58-b60f-4a71-a921-a4b338070505, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44311 Sep 22 23:22:22.114 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/44/db, method: GET, req_id: 35fcbe58-b60f-4a71-a921-a4b338070505, remote_addr: 127.0.0.1:47489, local_addr: 127.0.0.1:52864, task: repair
44312 Sep 22 23:22:22.115 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/02C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/02C.replace"
44313 Sep 22 23:22:22.115 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44314 Sep 22 23:22:22.116 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/02C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44315 Sep 22 23:22:22.116 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02C"
44316 Sep 22 23:22:22.116 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02C.db"
44317 Sep 22 23:22:22.116 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44318 Sep 22 23:22:22.116 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/02C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/02C.completed"
44319 Sep 22 23:22:22.116 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44320 Sep 22 23:22:22.116 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44321 Sep 22 23:22:22.117 DEBG [0] It's time to notify for 406
44322 Sep 22 23:22:22.117 INFO Completion from [0] id:406 status:true
44323 Sep 22 23:22:22.117 INFO [407/752] Repair commands completed
44324 Sep 22 23:22:22.117 INFO Pop front: ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }, state: ClientData([New, New, New]) }
44325 Sep 22 23:22:22.117 INFO Sent repair work, now wait for resp
44326 Sep 22 23:22:22.117 INFO [0] received reconcile message
44327 Sep 22 23:22:22.117 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }, state: ClientData([InProgress, New, New]) }, : downstairs
44328 Sep 22 23:22:22.117 INFO [0] client ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }
44329 Sep 22 23:22:22.117 INFO [1] received reconcile message
44330 Sep 22 23:22:22.117 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44331 Sep 22 23:22:22.117 INFO [1] client ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }
44332 Sep 22 23:22:22.117 INFO [2] received reconcile message
44333 Sep 22 23:22:22.117 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44334 Sep 22 23:22:22.117 INFO [2] client ExtentReopen { repair_id: ReconciliationId(407), extent_id: 44 }
44335 Sep 22 23:22:22.117 DEBG 407 Reopen extent 44
44336 Sep 22 23:22:22.118 DEBG 407 Reopen extent 44
44337 Sep 22 23:22:22.119 DEBG 407 Reopen extent 44
44338 Sep 22 23:22:22.119 DEBG [2] It's time to notify for 407
44339 Sep 22 23:22:22.119 INFO Completion from [2] id:407 status:true
44340 Sep 22 23:22:22.119 INFO [408/752] Repair commands completed
44341 Sep 22 23:22:22.119 INFO Pop front: ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44342 Sep 22 23:22:22.119 INFO Sent repair work, now wait for resp
44343 Sep 22 23:22:22.119 INFO [0] received reconcile message
44344 Sep 22 23:22:22.120 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44345 Sep 22 23:22:22.120 INFO [0] client ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44346 Sep 22 23:22:22.120 INFO [1] received reconcile message
44347 Sep 22 23:22:22.120 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44348 Sep 22 23:22:22.120 INFO [1] client ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44349 Sep 22 23:22:22.120 INFO [2] received reconcile message
44350 Sep 22 23:22:22.120 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44351 Sep 22 23:22:22.120 INFO [2] client ExtentFlush { repair_id: ReconciliationId(408), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44352 Sep 22 23:22:22.120 DEBG 408 Flush extent 45 with f:2 g:2
44353 Sep 22 23:22:22.120 DEBG Flush just extent 45 with f:2 and g:2
44354 Sep 22 23:22:22.120 DEBG [1] It's time to notify for 408
44355 Sep 22 23:22:22.120 INFO Completion from [1] id:408 status:true
44356 Sep 22 23:22:22.120 INFO [409/752] Repair commands completed
44357 Sep 22 23:22:22.120 INFO Pop front: ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }, state: ClientData([New, New, New]) }
44358 Sep 22 23:22:22.120 INFO Sent repair work, now wait for resp
44359 Sep 22 23:22:22.120 INFO [0] received reconcile message
44360 Sep 22 23:22:22.120 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }, state: ClientData([InProgress, New, New]) }, : downstairs
44361 Sep 22 23:22:22.120 INFO [0] client ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }
44362 Sep 22 23:22:22.120 INFO [1] received reconcile message
44363 Sep 22 23:22:22.120 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44364 Sep 22 23:22:22.120 INFO [1] client ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }
44365 Sep 22 23:22:22.120 INFO [2] received reconcile message
44366 Sep 22 23:22:22.120 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44367 Sep 22 23:22:22.120 INFO [2] client ExtentClose { repair_id: ReconciliationId(409), extent_id: 45 }
44368 Sep 22 23:22:22.120 DEBG 409 Close extent 45
44369 Sep 22 23:22:22.121 DEBG 409 Close extent 45
44370 Sep 22 23:22:22.121 DEBG 409 Close extent 45
44371 Sep 22 23:22:22.121 DEBG [2] It's time to notify for 409
44372 Sep 22 23:22:22.121 INFO Completion from [2] id:409 status:true
44373 Sep 22 23:22:22.121 INFO [410/752] Repair commands completed
44374 Sep 22 23:22:22.121 INFO Pop front: ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44375 Sep 22 23:22:22.121 INFO Sent repair work, now wait for resp
44376 Sep 22 23:22:22.122 INFO [0] received reconcile message
44377 Sep 22 23:22:22.122 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44378 Sep 22 23:22:22.122 INFO [0] client ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44379 Sep 22 23:22:22.122 INFO [0] Sending repair request ReconciliationId(410)
44380 Sep 22 23:22:22.122 INFO [1] received reconcile message
44381 Sep 22 23:22:22.122 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44382 Sep 22 23:22:22.122 INFO [1] client ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44383 Sep 22 23:22:22.122 INFO [1] No action required ReconciliationId(410)
44384 Sep 22 23:22:22.122 INFO [2] received reconcile message
44385 Sep 22 23:22:22.122 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44386 Sep 22 23:22:22.122 INFO [2] client ExtentRepair { repair_id: ReconciliationId(410), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44387 Sep 22 23:22:22.122 INFO [2] No action required ReconciliationId(410)
44388 Sep 22 23:22:22.122 DEBG 410 Repair extent 45 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44389 Sep 22 23:22:22.122 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/02D.copy"
44390 Sep 22 23:22:22.146 DEBG up_ds_listen was notified
44391 Sep 22 23:22:22.146 DEBG up_ds_listen process 1084
44392 Sep 22 23:22:22.146 DEBG [A] ack job 1084:85, : downstairs
44393 Sep 22 23:22:22.146 DEBG up_ds_listen checked 1 jobs, back to waiting
44394 Sep 22 23:22:22.146 DEBG Flush :1082 extent_limit None deps:[JobId(1081), JobId(1080)] res:true f:30 g:1
44395 Sep 22 23:22:22.152 DEBG Read :1083 deps:[JobId(1082)] res:true
44396 Sep 22 23:22:22.173 DEBG [rc] retire 1082 clears [JobId(1081), JobId(1082)], : downstairs
44397 Sep 22 23:22:22.176 INFO [lossy] sleeping 1 second
44398 Sep 22 23:22:22.187 INFO accepted connection, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44399 Sep 22 23:22:22.188 TRCE incoming request, uri: /extent/45/files, method: GET, req_id: f5aed8c7-4314-4fda-8d73-97115e9581ca, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44400 Sep 22 23:22:22.188 INFO request completed, latency_us: 264, response_code: 200, uri: /extent/45/files, method: GET, req_id: f5aed8c7-4314-4fda-8d73-97115e9581ca, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44401 Sep 22 23:22:22.188 INFO eid:45 Found repair files: ["02D", "02D.db"]
44402 Sep 22 23:22:22.189 TRCE incoming request, uri: /newextent/45/data, method: GET, req_id: 6b98c4e4-1d47-4e98-b2f5-a8b6803c4e4c, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44403 Sep 22 23:22:22.189 INFO request completed, latency_us: 345, response_code: 200, uri: /newextent/45/data, method: GET, req_id: 6b98c4e4-1d47-4e98-b2f5-a8b6803c4e4c, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44404 Sep 22 23:22:22.194 TRCE incoming request, uri: /newextent/45/db, method: GET, req_id: b059bdd3-383b-4827-ae6d-d1fbb3b2be02, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44405 Sep 22 23:22:22.194 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/45/db, method: GET, req_id: b059bdd3-383b-4827-ae6d-d1fbb3b2be02, remote_addr: 127.0.0.1:64850, local_addr: 127.0.0.1:52864, task: repair
44406 Sep 22 23:22:22.195 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/02D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/02D.replace"
44407 Sep 22 23:22:22.195 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44408 Sep 22 23:22:22.196 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/02D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44409 Sep 22 23:22:22.196 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02D"
44410 Sep 22 23:22:22.196 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02D.db"
44411 Sep 22 23:22:22.196 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44412 Sep 22 23:22:22.196 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/02D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/02D.completed"
44413 Sep 22 23:22:22.196 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44414 Sep 22 23:22:22.197 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44415 Sep 22 23:22:22.197 DEBG [0] It's time to notify for 410
44416 Sep 22 23:22:22.197 INFO Completion from [0] id:410 status:true
44417 Sep 22 23:22:22.197 INFO [411/752] Repair commands completed
44418 Sep 22 23:22:22.197 INFO Pop front: ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }, state: ClientData([New, New, New]) }
44419 Sep 22 23:22:22.197 INFO Sent repair work, now wait for resp
44420 Sep 22 23:22:22.197 INFO [0] received reconcile message
44421 Sep 22 23:22:22.197 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }, state: ClientData([InProgress, New, New]) }, : downstairs
44422 Sep 22 23:22:22.197 INFO [0] client ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }
44423 Sep 22 23:22:22.197 INFO [1] received reconcile message
44424 Sep 22 23:22:22.197 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44425 Sep 22 23:22:22.197 INFO [1] client ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }
44426 Sep 22 23:22:22.197 INFO [2] received reconcile message
44427 Sep 22 23:22:22.197 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44428 Sep 22 23:22:22.197 INFO [2] client ExtentReopen { repair_id: ReconciliationId(411), extent_id: 45 }
44429 Sep 22 23:22:22.197 DEBG 411 Reopen extent 45
44430 Sep 22 23:22:22.198 DEBG 411 Reopen extent 45
44431 Sep 22 23:22:22.199 DEBG 411 Reopen extent 45
44432 Sep 22 23:22:22.199 DEBG [2] It's time to notify for 411
44433 Sep 22 23:22:22.199 INFO Completion from [2] id:411 status:true
44434 Sep 22 23:22:22.199 INFO [412/752] Repair commands completed
44435 Sep 22 23:22:22.199 INFO Pop front: ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44436 Sep 22 23:22:22.199 INFO Sent repair work, now wait for resp
44437 Sep 22 23:22:22.199 INFO [0] received reconcile message
44438 Sep 22 23:22:22.199 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44439 Sep 22 23:22:22.200 INFO [0] client ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44440 Sep 22 23:22:22.200 INFO [1] received reconcile message
44441 Sep 22 23:22:22.200 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44442 Sep 22 23:22:22.200 INFO [1] client ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44443 Sep 22 23:22:22.200 INFO [2] received reconcile message
44444 Sep 22 23:22:22.200 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44445 Sep 22 23:22:22.200 INFO [2] client ExtentFlush { repair_id: ReconciliationId(412), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44446 Sep 22 23:22:22.200 DEBG 412 Flush extent 115 with f:2 g:2
44447 Sep 22 23:22:22.200 DEBG Flush just extent 115 with f:2 and g:2
44448 Sep 22 23:22:22.200 DEBG [1] It's time to notify for 412
44449 Sep 22 23:22:22.200 INFO Completion from [1] id:412 status:true
44450 Sep 22 23:22:22.200 INFO [413/752] Repair commands completed
44451 Sep 22 23:22:22.200 INFO Pop front: ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }, state: ClientData([New, New, New]) }
44452 Sep 22 23:22:22.200 INFO Sent repair work, now wait for resp
44453 Sep 22 23:22:22.200 INFO [0] received reconcile message
44454 Sep 22 23:22:22.200 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }, state: ClientData([InProgress, New, New]) }, : downstairs
44455 Sep 22 23:22:22.200 INFO [0] client ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }
44456 Sep 22 23:22:22.200 INFO [1] received reconcile message
44457 Sep 22 23:22:22.200 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44458 Sep 22 23:22:22.200 INFO [1] client ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }
44459 Sep 22 23:22:22.200 INFO [2] received reconcile message
44460 Sep 22 23:22:22.200 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44461 Sep 22 23:22:22.200 INFO [2] client ExtentClose { repair_id: ReconciliationId(413), extent_id: 115 }
44462 Sep 22 23:22:22.200 DEBG 413 Close extent 115
44463 Sep 22 23:22:22.201 DEBG 413 Close extent 115
44464 Sep 22 23:22:22.201 DEBG 413 Close extent 115
44465 Sep 22 23:22:22.201 DEBG [2] It's time to notify for 413
44466 Sep 22 23:22:22.201 INFO Completion from [2] id:413 status:true
44467 Sep 22 23:22:22.201 INFO [414/752] Repair commands completed
44468 Sep 22 23:22:22.201 INFO Pop front: ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44469 Sep 22 23:22:22.201 INFO Sent repair work, now wait for resp
44470 Sep 22 23:22:22.202 INFO [0] received reconcile message
44471 Sep 22 23:22:22.202 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44472 Sep 22 23:22:22.202 INFO [0] client ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44473 Sep 22 23:22:22.202 INFO [0] Sending repair request ReconciliationId(414)
44474 Sep 22 23:22:22.202 INFO [1] received reconcile message
44475 Sep 22 23:22:22.202 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44476 Sep 22 23:22:22.202 INFO [1] client ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44477 Sep 22 23:22:22.202 INFO [1] No action required ReconciliationId(414)
44478 Sep 22 23:22:22.202 INFO [2] received reconcile message
44479 Sep 22 23:22:22.202 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44480 Sep 22 23:22:22.202 INFO [2] client ExtentRepair { repair_id: ReconciliationId(414), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44481 Sep 22 23:22:22.202 INFO [2] No action required ReconciliationId(414)
44482 Sep 22 23:22:22.202 DEBG 414 Repair extent 115 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44483 Sep 22 23:22:22.202 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/073.copy"
44484 Sep 22 23:22:22.265 INFO accepted connection, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44485 Sep 22 23:22:22.265 TRCE incoming request, uri: /extent/115/files, method: GET, req_id: 4aa6657a-e91a-49b7-bc25-aebfd9e854d4, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44486 Sep 22 23:22:22.266 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/115/files, method: GET, req_id: 4aa6657a-e91a-49b7-bc25-aebfd9e854d4, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44487 Sep 22 23:22:22.266 INFO eid:115 Found repair files: ["073", "073.db"]
44488 Sep 22 23:22:22.266 TRCE incoming request, uri: /newextent/115/data, method: GET, req_id: 2cacbc77-5007-40f1-9550-92f52cfa09af, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44489 Sep 22 23:22:22.267 INFO request completed, latency_us: 311, response_code: 200, uri: /newextent/115/data, method: GET, req_id: 2cacbc77-5007-40f1-9550-92f52cfa09af, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44490 Sep 22 23:22:22.271 TRCE incoming request, uri: /newextent/115/db, method: GET, req_id: fe5b4061-9199-4095-b921-361ed976f001, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44491 Sep 22 23:22:22.272 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/115/db, method: GET, req_id: fe5b4061-9199-4095-b921-361ed976f001, remote_addr: 127.0.0.1:58736, local_addr: 127.0.0.1:52864, task: repair
44492 Sep 22 23:22:22.273 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/073.copy" to "/tmp/downstairs-zrMnlo6G/00/000/073.replace"
44493 Sep 22 23:22:22.273 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44494 Sep 22 23:22:22.274 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/073.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44495 Sep 22 23:22:22.274 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/073"
44496 Sep 22 23:22:22.274 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/073.db"
44497 Sep 22 23:22:22.274 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44498 Sep 22 23:22:22.274 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/073.replace" to "/tmp/downstairs-zrMnlo6G/00/000/073.completed"
44499 Sep 22 23:22:22.274 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44500 Sep 22 23:22:22.274 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44501 Sep 22 23:22:22.274 DEBG [0] It's time to notify for 414
44502 Sep 22 23:22:22.274 INFO Completion from [0] id:414 status:true
44503 Sep 22 23:22:22.274 INFO [415/752] Repair commands completed
44504 Sep 22 23:22:22.274 INFO Pop front: ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }, state: ClientData([New, New, New]) }
44505 Sep 22 23:22:22.274 INFO Sent repair work, now wait for resp
44506 Sep 22 23:22:22.274 INFO [0] received reconcile message
44507 Sep 22 23:22:22.274 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }, state: ClientData([InProgress, New, New]) }, : downstairs
44508 Sep 22 23:22:22.275 INFO [0] client ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }
44509 Sep 22 23:22:22.275 INFO [1] received reconcile message
44510 Sep 22 23:22:22.275 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44511 Sep 22 23:22:22.275 INFO [1] client ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }
44512 Sep 22 23:22:22.275 INFO [2] received reconcile message
44513 Sep 22 23:22:22.275 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44514 Sep 22 23:22:22.275 INFO [2] client ExtentReopen { repair_id: ReconciliationId(415), extent_id: 115 }
44515 Sep 22 23:22:22.275 DEBG 415 Reopen extent 115
44516 Sep 22 23:22:22.275 DEBG 415 Reopen extent 115
44517 Sep 22 23:22:22.276 DEBG 415 Reopen extent 115
44518 Sep 22 23:22:22.277 DEBG [2] It's time to notify for 415
44519 Sep 22 23:22:22.277 INFO Completion from [2] id:415 status:true
44520 Sep 22 23:22:22.277 INFO [416/752] Repair commands completed
44521 Sep 22 23:22:22.277 INFO Pop front: ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44522 Sep 22 23:22:22.277 INFO Sent repair work, now wait for resp
44523 Sep 22 23:22:22.277 INFO [0] received reconcile message
44524 Sep 22 23:22:22.277 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44525 Sep 22 23:22:22.277 INFO [0] client ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44526 Sep 22 23:22:22.277 INFO [1] received reconcile message
44527 Sep 22 23:22:22.277 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44528 Sep 22 23:22:22.277 INFO [1] client ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44529 Sep 22 23:22:22.277 INFO [2] received reconcile message
44530 Sep 22 23:22:22.277 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44531 Sep 22 23:22:22.277 INFO [2] client ExtentFlush { repair_id: ReconciliationId(416), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44532 Sep 22 23:22:22.277 DEBG 416 Flush extent 32 with f:2 g:2
44533 Sep 22 23:22:22.277 DEBG Flush just extent 32 with f:2 and g:2
44534 Sep 22 23:22:22.277 DEBG [1] It's time to notify for 416
44535 Sep 22 23:22:22.277 INFO Completion from [1] id:416 status:true
44536 Sep 22 23:22:22.277 INFO [417/752] Repair commands completed
44537 Sep 22 23:22:22.277 INFO Pop front: ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }, state: ClientData([New, New, New]) }
44538 Sep 22 23:22:22.277 INFO Sent repair work, now wait for resp
44539 Sep 22 23:22:22.277 INFO [0] received reconcile message
44540 Sep 22 23:22:22.277 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }, state: ClientData([InProgress, New, New]) }, : downstairs
44541 Sep 22 23:22:22.277 INFO [0] client ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }
44542 Sep 22 23:22:22.277 INFO [1] received reconcile message
44543 Sep 22 23:22:22.277 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44544 Sep 22 23:22:22.277 INFO [1] client ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }
44545 Sep 22 23:22:22.277 INFO [2] received reconcile message
44546 Sep 22 23:22:22.277 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44547 Sep 22 23:22:22.277 INFO [2] client ExtentClose { repair_id: ReconciliationId(417), extent_id: 32 }
44548 Sep 22 23:22:22.278 DEBG 417 Close extent 32
44549 Sep 22 23:22:22.278 DEBG 417 Close extent 32
44550 Sep 22 23:22:22.278 DEBG 417 Close extent 32
44551 Sep 22 23:22:22.279 DEBG [2] It's time to notify for 417
44552 Sep 22 23:22:22.279 INFO Completion from [2] id:417 status:true
44553 Sep 22 23:22:22.279 INFO [418/752] Repair commands completed
44554 Sep 22 23:22:22.279 INFO Pop front: ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44555 Sep 22 23:22:22.279 INFO Sent repair work, now wait for resp
44556 Sep 22 23:22:22.279 INFO [0] received reconcile message
44557 Sep 22 23:22:22.279 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44558 Sep 22 23:22:22.279 INFO [0] client ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44559 Sep 22 23:22:22.279 INFO [0] Sending repair request ReconciliationId(418)
44560 Sep 22 23:22:22.279 INFO [1] received reconcile message
44561 Sep 22 23:22:22.279 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44562 Sep 22 23:22:22.279 INFO [1] client ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44563 Sep 22 23:22:22.279 INFO [1] No action required ReconciliationId(418)
44564 Sep 22 23:22:22.279 INFO [2] received reconcile message
44565 Sep 22 23:22:22.279 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44566 Sep 22 23:22:22.279 INFO [2] client ExtentRepair { repair_id: ReconciliationId(418), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44567 Sep 22 23:22:22.279 INFO [2] No action required ReconciliationId(418)
44568 Sep 22 23:22:22.279 DEBG 418 Repair extent 32 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44569 Sep 22 23:22:22.279 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/020.copy"
44570 Sep 22 23:22:22.344 INFO accepted connection, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44571 Sep 22 23:22:22.344 TRCE incoming request, uri: /extent/32/files, method: GET, req_id: 3e786468-3ace-4608-9bb9-be0e1efd4ca4, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44572 Sep 22 23:22:22.344 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/32/files, method: GET, req_id: 3e786468-3ace-4608-9bb9-be0e1efd4ca4, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44573 Sep 22 23:22:22.344 INFO eid:32 Found repair files: ["020", "020.db"]
44574 Sep 22 23:22:22.345 TRCE incoming request, uri: /newextent/32/data, method: GET, req_id: b1e603a4-e6d0-4154-8a4d-ed999ce6747f, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44575 Sep 22 23:22:22.345 INFO request completed, latency_us: 265, response_code: 200, uri: /newextent/32/data, method: GET, req_id: b1e603a4-e6d0-4154-8a4d-ed999ce6747f, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44576 Sep 22 23:22:22.350 TRCE incoming request, uri: /newextent/32/db, method: GET, req_id: e03cf918-1563-48f4-93b5-34d018f48076, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44577 Sep 22 23:22:22.350 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/32/db, method: GET, req_id: e03cf918-1563-48f4-93b5-34d018f48076, remote_addr: 127.0.0.1:65445, local_addr: 127.0.0.1:52864, task: repair
44578 Sep 22 23:22:22.351 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/020.copy" to "/tmp/downstairs-zrMnlo6G/00/000/020.replace"
44579 Sep 22 23:22:22.351 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44580 Sep 22 23:22:22.352 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/020.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44581 Sep 22 23:22:22.352 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/020"
44582 Sep 22 23:22:22.352 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/020.db"
44583 Sep 22 23:22:22.352 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44584 Sep 22 23:22:22.352 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/020.replace" to "/tmp/downstairs-zrMnlo6G/00/000/020.completed"
44585 Sep 22 23:22:22.352 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44586 Sep 22 23:22:22.352 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44587 Sep 22 23:22:22.353 DEBG [0] It's time to notify for 418
44588 Sep 22 23:22:22.353 INFO Completion from [0] id:418 status:true
44589 Sep 22 23:22:22.353 INFO [419/752] Repair commands completed
44590 Sep 22 23:22:22.353 INFO Pop front: ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }, state: ClientData([New, New, New]) }
44591 Sep 22 23:22:22.353 INFO Sent repair work, now wait for resp
44592 Sep 22 23:22:22.353 INFO [0] received reconcile message
44593 Sep 22 23:22:22.353 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }, state: ClientData([InProgress, New, New]) }, : downstairs
44594 Sep 22 23:22:22.353 INFO [0] client ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }
44595 Sep 22 23:22:22.353 INFO [1] received reconcile message
44596 Sep 22 23:22:22.353 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44597 Sep 22 23:22:22.353 INFO [1] client ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }
44598 Sep 22 23:22:22.353 INFO [2] received reconcile message
44599 Sep 22 23:22:22.353 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44600 Sep 22 23:22:22.353 INFO [2] client ExtentReopen { repair_id: ReconciliationId(419), extent_id: 32 }
44601 Sep 22 23:22:22.353 DEBG 419 Reopen extent 32
44602 Sep 22 23:22:22.354 DEBG 419 Reopen extent 32
44603 Sep 22 23:22:22.354 DEBG 419 Reopen extent 32
44604 Sep 22 23:22:22.355 DEBG [2] It's time to notify for 419
44605 Sep 22 23:22:22.355 INFO Completion from [2] id:419 status:true
44606 Sep 22 23:22:22.355 INFO [420/752] Repair commands completed
44607 Sep 22 23:22:22.355 INFO Pop front: ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44608 Sep 22 23:22:22.355 INFO Sent repair work, now wait for resp
44609 Sep 22 23:22:22.355 INFO [0] received reconcile message
44610 Sep 22 23:22:22.355 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44611 Sep 22 23:22:22.355 INFO [0] client ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44612 Sep 22 23:22:22.355 INFO [1] received reconcile message
44613 Sep 22 23:22:22.355 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44614 Sep 22 23:22:22.355 INFO [1] client ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44615 Sep 22 23:22:22.355 INFO [2] received reconcile message
44616 Sep 22 23:22:22.355 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44617 Sep 22 23:22:22.355 INFO [2] client ExtentFlush { repair_id: ReconciliationId(420), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44618 Sep 22 23:22:22.355 DEBG 420 Flush extent 141 with f:2 g:2
44619 Sep 22 23:22:22.355 DEBG Flush just extent 141 with f:2 and g:2
44620 Sep 22 23:22:22.356 DEBG [1] It's time to notify for 420
44621 Sep 22 23:22:22.356 INFO Completion from [1] id:420 status:true
44622 Sep 22 23:22:22.356 INFO [421/752] Repair commands completed
44623 Sep 22 23:22:22.356 INFO Pop front: ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }, state: ClientData([New, New, New]) }
44624 Sep 22 23:22:22.356 INFO Sent repair work, now wait for resp
44625 Sep 22 23:22:22.356 INFO [0] received reconcile message
44626 Sep 22 23:22:22.356 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }, state: ClientData([InProgress, New, New]) }, : downstairs
44627 Sep 22 23:22:22.356 INFO [0] client ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }
44628 Sep 22 23:22:22.356 INFO [1] received reconcile message
44629 Sep 22 23:22:22.356 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44630 Sep 22 23:22:22.356 INFO [1] client ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }
44631 Sep 22 23:22:22.356 INFO [2] received reconcile message
44632 Sep 22 23:22:22.356 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44633 Sep 22 23:22:22.356 INFO [2] client ExtentClose { repair_id: ReconciliationId(421), extent_id: 141 }
44634 Sep 22 23:22:22.356 DEBG 421 Close extent 141
44635 Sep 22 23:22:22.356 DEBG 421 Close extent 141
44636 Sep 22 23:22:22.357 DEBG 421 Close extent 141
44637 Sep 22 23:22:22.357 DEBG [2] It's time to notify for 421
44638 Sep 22 23:22:22.357 INFO Completion from [2] id:421 status:true
44639 Sep 22 23:22:22.357 INFO [422/752] Repair commands completed
44640 Sep 22 23:22:22.357 INFO Pop front: ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44641 Sep 22 23:22:22.357 INFO Sent repair work, now wait for resp
44642 Sep 22 23:22:22.357 INFO [0] received reconcile message
44643 Sep 22 23:22:22.357 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44644 Sep 22 23:22:22.357 INFO [0] client ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44645 Sep 22 23:22:22.357 INFO [0] Sending repair request ReconciliationId(422)
44646 Sep 22 23:22:22.357 INFO [1] received reconcile message
44647 Sep 22 23:22:22.357 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44648 Sep 22 23:22:22.357 INFO [1] client ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44649 Sep 22 23:22:22.357 INFO [1] No action required ReconciliationId(422)
44650 Sep 22 23:22:22.357 INFO [2] received reconcile message
44651 Sep 22 23:22:22.357 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44652 Sep 22 23:22:22.357 INFO [2] client ExtentRepair { repair_id: ReconciliationId(422), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44653 Sep 22 23:22:22.357 INFO [2] No action required ReconciliationId(422)
44654 Sep 22 23:22:22.357 DEBG 422 Repair extent 141 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44655 Sep 22 23:22:22.358 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/08D.copy"
44656 Sep 22 23:22:22.422 INFO accepted connection, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44657 Sep 22 23:22:22.423 TRCE incoming request, uri: /extent/141/files, method: GET, req_id: 66d607e5-b0b1-4e4d-ba8d-8eb1c7288ada, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44658 Sep 22 23:22:22.423 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/141/files, method: GET, req_id: 66d607e5-b0b1-4e4d-ba8d-8eb1c7288ada, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44659 Sep 22 23:22:22.423 INFO eid:141 Found repair files: ["08D", "08D.db"]
44660 Sep 22 23:22:22.423 TRCE incoming request, uri: /newextent/141/data, method: GET, req_id: 8abbe60c-6f9f-42f4-9b40-0edc524f86d5, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44661 Sep 22 23:22:22.424 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/141/data, method: GET, req_id: 8abbe60c-6f9f-42f4-9b40-0edc524f86d5, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44662 Sep 22 23:22:22.428 TRCE incoming request, uri: /newextent/141/db, method: GET, req_id: c81c42cd-0acd-4f62-ac98-ab1adbc4885e, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44663 Sep 22 23:22:22.428 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/141/db, method: GET, req_id: c81c42cd-0acd-4f62-ac98-ab1adbc4885e, remote_addr: 127.0.0.1:64214, local_addr: 127.0.0.1:52864, task: repair
44664 Sep 22 23:22:22.430 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/08D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/08D.replace"
44665 Sep 22 23:22:22.430 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44666 Sep 22 23:22:22.430 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/08D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44667 Sep 22 23:22:22.431 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08D"
44668 Sep 22 23:22:22.431 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08D.db"
44669 Sep 22 23:22:22.431 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44670 Sep 22 23:22:22.431 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/08D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/08D.completed"
44671 Sep 22 23:22:22.431 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44672 Sep 22 23:22:22.431 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44673 Sep 22 23:22:22.431 DEBG [0] It's time to notify for 422
44674 Sep 22 23:22:22.431 INFO Completion from [0] id:422 status:true
44675 Sep 22 23:22:22.431 INFO [423/752] Repair commands completed
44676 Sep 22 23:22:22.431 INFO Pop front: ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }, state: ClientData([New, New, New]) }
44677 Sep 22 23:22:22.431 INFO Sent repair work, now wait for resp
44678 Sep 22 23:22:22.431 INFO [0] received reconcile message
44679 Sep 22 23:22:22.431 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }, state: ClientData([InProgress, New, New]) }, : downstairs
44680 Sep 22 23:22:22.431 INFO [0] client ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }
44681 Sep 22 23:22:22.431 INFO [1] received reconcile message
44682 Sep 22 23:22:22.431 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44683 Sep 22 23:22:22.431 INFO [1] client ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }
44684 Sep 22 23:22:22.432 INFO [2] received reconcile message
44685 Sep 22 23:22:22.432 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44686 Sep 22 23:22:22.432 INFO [2] client ExtentReopen { repair_id: ReconciliationId(423), extent_id: 141 }
44687 Sep 22 23:22:22.432 DEBG 423 Reopen extent 141
44688 Sep 22 23:22:22.432 DEBG 423 Reopen extent 141
44689 Sep 22 23:22:22.433 DEBG 423 Reopen extent 141
44690 Sep 22 23:22:22.433 DEBG [2] It's time to notify for 423
44691 Sep 22 23:22:22.433 INFO Completion from [2] id:423 status:true
44692 Sep 22 23:22:22.433 INFO [424/752] Repair commands completed
44693 Sep 22 23:22:22.433 INFO Pop front: ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44694 Sep 22 23:22:22.433 INFO Sent repair work, now wait for resp
44695 Sep 22 23:22:22.434 INFO [0] received reconcile message
44696 Sep 22 23:22:22.434 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44697 Sep 22 23:22:22.434 INFO [0] client ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44698 Sep 22 23:22:22.434 INFO [1] received reconcile message
44699 Sep 22 23:22:22.434 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44700 Sep 22 23:22:22.434 INFO [1] client ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44701 Sep 22 23:22:22.434 INFO [2] received reconcile message
44702 Sep 22 23:22:22.434 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44703 Sep 22 23:22:22.434 INFO [2] client ExtentFlush { repair_id: ReconciliationId(424), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44704 Sep 22 23:22:22.434 DEBG 424 Flush extent 9 with f:2 g:2
44705 Sep 22 23:22:22.434 DEBG Flush just extent 9 with f:2 and g:2
44706 Sep 22 23:22:22.434 DEBG [1] It's time to notify for 424
44707 Sep 22 23:22:22.434 INFO Completion from [1] id:424 status:true
44708 Sep 22 23:22:22.434 INFO [425/752] Repair commands completed
44709 Sep 22 23:22:22.434 INFO Pop front: ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }, state: ClientData([New, New, New]) }
44710 Sep 22 23:22:22.434 INFO Sent repair work, now wait for resp
44711 Sep 22 23:22:22.434 INFO [0] received reconcile message
44712 Sep 22 23:22:22.434 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }, state: ClientData([InProgress, New, New]) }, : downstairs
44713 Sep 22 23:22:22.434 INFO [0] client ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }
44714 Sep 22 23:22:22.434 INFO [1] received reconcile message
44715 Sep 22 23:22:22.434 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44716 Sep 22 23:22:22.434 INFO [1] client ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }
44717 Sep 22 23:22:22.434 INFO [2] received reconcile message
44718 Sep 22 23:22:22.434 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44719 Sep 22 23:22:22.434 INFO [2] client ExtentClose { repair_id: ReconciliationId(425), extent_id: 9 }
44720 Sep 22 23:22:22.434 DEBG 425 Close extent 9
44721 Sep 22 23:22:22.435 DEBG 425 Close extent 9
44722 Sep 22 23:22:22.435 DEBG 425 Close extent 9
44723 Sep 22 23:22:22.435 DEBG [2] It's time to notify for 425
44724 Sep 22 23:22:22.435 INFO Completion from [2] id:425 status:true
44725 Sep 22 23:22:22.435 INFO [426/752] Repair commands completed
44726 Sep 22 23:22:22.435 INFO Pop front: ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44727 Sep 22 23:22:22.435 INFO Sent repair work, now wait for resp
44728 Sep 22 23:22:22.436 INFO [0] received reconcile message
44729 Sep 22 23:22:22.436 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44730 Sep 22 23:22:22.436 INFO [0] client ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44731 Sep 22 23:22:22.436 INFO [0] Sending repair request ReconciliationId(426)
44732 Sep 22 23:22:22.436 INFO [1] received reconcile message
44733 Sep 22 23:22:22.436 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44734 Sep 22 23:22:22.436 INFO [1] client ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44735 Sep 22 23:22:22.436 INFO [1] No action required ReconciliationId(426)
44736 Sep 22 23:22:22.436 INFO [2] received reconcile message
44737 Sep 22 23:22:22.436 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44738 Sep 22 23:22:22.436 INFO [2] client ExtentRepair { repair_id: ReconciliationId(426), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44739 Sep 22 23:22:22.436 INFO [2] No action required ReconciliationId(426)
44740 Sep 22 23:22:22.436 DEBG 426 Repair extent 9 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44741 Sep 22 23:22:22.436 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/009.copy"
44742 Sep 22 23:22:22.500 INFO accepted connection, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44743 Sep 22 23:22:22.501 TRCE incoming request, uri: /extent/9/files, method: GET, req_id: bebd1bf1-1ff9-4206-bc31-69ebb6ff499e, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44744 Sep 22 23:22:22.501 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/9/files, method: GET, req_id: bebd1bf1-1ff9-4206-bc31-69ebb6ff499e, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44745 Sep 22 23:22:22.501 INFO eid:9 Found repair files: ["009", "009.db"]
44746 Sep 22 23:22:22.501 TRCE incoming request, uri: /newextent/9/data, method: GET, req_id: b4c4876a-f563-482e-878a-88392de4c9e6, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44747 Sep 22 23:22:22.502 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/9/data, method: GET, req_id: b4c4876a-f563-482e-878a-88392de4c9e6, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44748 Sep 22 23:22:22.506 TRCE incoming request, uri: /newextent/9/db, method: GET, req_id: 510f2e47-4533-468c-86cc-ffeb6be2d09b, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44749 Sep 22 23:22:22.507 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/9/db, method: GET, req_id: 510f2e47-4533-468c-86cc-ffeb6be2d09b, remote_addr: 127.0.0.1:51090, local_addr: 127.0.0.1:52864, task: repair
44750 Sep 22 23:22:22.508 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/009.copy" to "/tmp/downstairs-zrMnlo6G/00/000/009.replace"
44751 Sep 22 23:22:22.508 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44752 Sep 22 23:22:22.509 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/009.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44753 Sep 22 23:22:22.509 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/009"
44754 Sep 22 23:22:22.509 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/009.db"
44755 Sep 22 23:22:22.509 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44756 Sep 22 23:22:22.509 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/009.replace" to "/tmp/downstairs-zrMnlo6G/00/000/009.completed"
44757 Sep 22 23:22:22.509 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44758 Sep 22 23:22:22.509 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44759 Sep 22 23:22:22.509 DEBG [0] It's time to notify for 426
44760 Sep 22 23:22:22.510 INFO Completion from [0] id:426 status:true
44761 Sep 22 23:22:22.510 INFO [427/752] Repair commands completed
44762 Sep 22 23:22:22.510 INFO Pop front: ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }, state: ClientData([New, New, New]) }
44763 Sep 22 23:22:22.510 INFO Sent repair work, now wait for resp
44764 Sep 22 23:22:22.510 INFO [0] received reconcile message
44765 Sep 22 23:22:22.510 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }, state: ClientData([InProgress, New, New]) }, : downstairs
44766 Sep 22 23:22:22.510 INFO [0] client ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }
44767 Sep 22 23:22:22.510 INFO [1] received reconcile message
44768 Sep 22 23:22:22.510 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44769 Sep 22 23:22:22.510 INFO [1] client ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }
44770 Sep 22 23:22:22.510 INFO [2] received reconcile message
44771 Sep 22 23:22:22.510 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44772 Sep 22 23:22:22.510 INFO [2] client ExtentReopen { repair_id: ReconciliationId(427), extent_id: 9 }
44773 Sep 22 23:22:22.510 DEBG 427 Reopen extent 9
44774 Sep 22 23:22:22.511 DEBG 427 Reopen extent 9
44775 Sep 22 23:22:22.511 DEBG 427 Reopen extent 9
44776 Sep 22 23:22:22.512 DEBG [2] It's time to notify for 427
44777 Sep 22 23:22:22.512 INFO Completion from [2] id:427 status:true
44778 Sep 22 23:22:22.512 INFO [428/752] Repair commands completed
44779 Sep 22 23:22:22.512 INFO Pop front: ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44780 Sep 22 23:22:22.512 INFO Sent repair work, now wait for resp
44781 Sep 22 23:22:22.512 INFO [0] received reconcile message
44782 Sep 22 23:22:22.512 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44783 Sep 22 23:22:22.512 INFO [0] client ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44784 Sep 22 23:22:22.512 INFO [1] received reconcile message
44785 Sep 22 23:22:22.512 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44786 Sep 22 23:22:22.512 INFO [1] client ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44787 Sep 22 23:22:22.512 INFO [2] received reconcile message
44788 Sep 22 23:22:22.512 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44789 Sep 22 23:22:22.512 INFO [2] client ExtentFlush { repair_id: ReconciliationId(428), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44790 Sep 22 23:22:22.512 DEBG 428 Flush extent 186 with f:2 g:2
44791 Sep 22 23:22:22.512 DEBG Flush just extent 186 with f:2 and g:2
44792 Sep 22 23:22:22.512 DEBG [1] It's time to notify for 428
44793 Sep 22 23:22:22.512 INFO Completion from [1] id:428 status:true
44794 Sep 22 23:22:22.512 INFO [429/752] Repair commands completed
44795 Sep 22 23:22:22.512 INFO Pop front: ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }, state: ClientData([New, New, New]) }
44796 Sep 22 23:22:22.512 INFO Sent repair work, now wait for resp
44797 Sep 22 23:22:22.512 INFO [0] received reconcile message
44798 Sep 22 23:22:22.513 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }, state: ClientData([InProgress, New, New]) }, : downstairs
44799 Sep 22 23:22:22.513 INFO [0] client ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }
44800 Sep 22 23:22:22.513 INFO [1] received reconcile message
44801 Sep 22 23:22:22.513 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44802 Sep 22 23:22:22.513 INFO [1] client ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }
44803 Sep 22 23:22:22.513 INFO [2] received reconcile message
44804 Sep 22 23:22:22.513 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44805 Sep 22 23:22:22.513 INFO [2] client ExtentClose { repair_id: ReconciliationId(429), extent_id: 186 }
44806 Sep 22 23:22:22.513 DEBG 429 Close extent 186
44807 Sep 22 23:22:22.513 DEBG 429 Close extent 186
44808 Sep 22 23:22:22.513 DEBG 429 Close extent 186
44809 Sep 22 23:22:22.514 DEBG [2] It's time to notify for 429
44810 Sep 22 23:22:22.514 INFO Completion from [2] id:429 status:true
44811 Sep 22 23:22:22.514 INFO [430/752] Repair commands completed
44812 Sep 22 23:22:22.514 INFO Pop front: ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44813 Sep 22 23:22:22.514 INFO Sent repair work, now wait for resp
44814 Sep 22 23:22:22.514 INFO [0] received reconcile message
44815 Sep 22 23:22:22.514 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44816 Sep 22 23:22:22.514 INFO [0] client ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44817 Sep 22 23:22:22.514 INFO [0] Sending repair request ReconciliationId(430)
44818 Sep 22 23:22:22.514 INFO [1] received reconcile message
44819 Sep 22 23:22:22.514 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44820 Sep 22 23:22:22.514 INFO [1] client ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44821 Sep 22 23:22:22.514 INFO [1] No action required ReconciliationId(430)
44822 Sep 22 23:22:22.514 INFO [2] received reconcile message
44823 Sep 22 23:22:22.514 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44824 Sep 22 23:22:22.514 INFO [2] client ExtentRepair { repair_id: ReconciliationId(430), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44825 Sep 22 23:22:22.514 INFO [2] No action required ReconciliationId(430)
44826 Sep 22 23:22:22.514 DEBG 430 Repair extent 186 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44827 Sep 22 23:22:22.514 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0BA.copy"
44828 Sep 22 23:22:22.553 DEBG [0] Read AckReady 1085, : downstairs
44829 Sep 22 23:22:22.554 DEBG up_ds_listen was notified
44830 Sep 22 23:22:22.555 DEBG up_ds_listen process 1085
44831 Sep 22 23:22:22.555 DEBG [A] ack job 1085:86, : downstairs
44832 Sep 22 23:22:22.579 INFO accepted connection, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44833 Sep 22 23:22:22.579 TRCE incoming request, uri: /extent/186/files, method: GET, req_id: c8bd42f2-88fa-4bd2-94ff-b44d949588ff, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44834 Sep 22 23:22:22.579 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/186/files, method: GET, req_id: c8bd42f2-88fa-4bd2-94ff-b44d949588ff, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44835 Sep 22 23:22:22.579 INFO eid:186 Found repair files: ["0BA", "0BA.db"]
44836 Sep 22 23:22:22.580 TRCE incoming request, uri: /newextent/186/data, method: GET, req_id: 695c9b92-f1b2-46c7-ba20-72a0e9b8f8da, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44837 Sep 22 23:22:22.580 INFO request completed, latency_us: 261, response_code: 200, uri: /newextent/186/data, method: GET, req_id: 695c9b92-f1b2-46c7-ba20-72a0e9b8f8da, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44838 Sep 22 23:22:22.585 TRCE incoming request, uri: /newextent/186/db, method: GET, req_id: d5f0695a-18cc-44c8-948c-7d05792d94c1, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44839 Sep 22 23:22:22.585 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/186/db, method: GET, req_id: d5f0695a-18cc-44c8-948c-7d05792d94c1, remote_addr: 127.0.0.1:38910, local_addr: 127.0.0.1:52864, task: repair
44840 Sep 22 23:22:22.586 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0BA.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0BA.replace"
44841 Sep 22 23:22:22.586 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44842 Sep 22 23:22:22.587 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0BA.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44843 Sep 22 23:22:22.587 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0BA"
44844 Sep 22 23:22:22.587 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0BA.db"
44845 Sep 22 23:22:22.588 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44846 Sep 22 23:22:22.588 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0BA.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0BA.completed"
44847 Sep 22 23:22:22.588 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44848 Sep 22 23:22:22.588 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44849 Sep 22 23:22:22.588 DEBG [0] It's time to notify for 430
44850 Sep 22 23:22:22.588 INFO Completion from [0] id:430 status:true
44851 Sep 22 23:22:22.588 INFO [431/752] Repair commands completed
44852 Sep 22 23:22:22.588 INFO Pop front: ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }, state: ClientData([New, New, New]) }
44853 Sep 22 23:22:22.588 INFO Sent repair work, now wait for resp
44854 Sep 22 23:22:22.588 INFO [0] received reconcile message
44855 Sep 22 23:22:22.588 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }, state: ClientData([InProgress, New, New]) }, : downstairs
44856 Sep 22 23:22:22.588 INFO [0] client ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }
44857 Sep 22 23:22:22.588 INFO [1] received reconcile message
44858 Sep 22 23:22:22.588 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44859 Sep 22 23:22:22.588 INFO [1] client ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }
44860 Sep 22 23:22:22.588 INFO [2] received reconcile message
44861 Sep 22 23:22:22.588 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44862 Sep 22 23:22:22.588 INFO [2] client ExtentReopen { repair_id: ReconciliationId(431), extent_id: 186 }
44863 Sep 22 23:22:22.588 DEBG 431 Reopen extent 186
44864 Sep 22 23:22:22.589 DEBG 431 Reopen extent 186
44865 Sep 22 23:22:22.590 DEBG 431 Reopen extent 186
44866 Sep 22 23:22:22.590 DEBG [2] It's time to notify for 431
44867 Sep 22 23:22:22.590 INFO Completion from [2] id:431 status:true
44868 Sep 22 23:22:22.590 INFO [432/752] Repair commands completed
44869 Sep 22 23:22:22.590 INFO Pop front: ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44870 Sep 22 23:22:22.590 INFO Sent repair work, now wait for resp
44871 Sep 22 23:22:22.590 INFO [0] received reconcile message
44872 Sep 22 23:22:22.590 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44873 Sep 22 23:22:22.590 INFO [0] client ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44874 Sep 22 23:22:22.590 INFO [1] received reconcile message
44875 Sep 22 23:22:22.590 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44876 Sep 22 23:22:22.590 INFO [1] client ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44877 Sep 22 23:22:22.590 INFO [2] received reconcile message
44878 Sep 22 23:22:22.590 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44879 Sep 22 23:22:22.590 INFO [2] client ExtentFlush { repair_id: ReconciliationId(432), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44880 Sep 22 23:22:22.591 DEBG 432 Flush extent 130 with f:2 g:2
44881 Sep 22 23:22:22.591 DEBG Flush just extent 130 with f:2 and g:2
44882 Sep 22 23:22:22.591 DEBG [1] It's time to notify for 432
44883 Sep 22 23:22:22.591 INFO Completion from [1] id:432 status:true
44884 Sep 22 23:22:22.591 INFO [433/752] Repair commands completed
44885 Sep 22 23:22:22.591 INFO Pop front: ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }, state: ClientData([New, New, New]) }
44886 Sep 22 23:22:22.591 INFO Sent repair work, now wait for resp
44887 Sep 22 23:22:22.591 INFO [0] received reconcile message
44888 Sep 22 23:22:22.591 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }, state: ClientData([InProgress, New, New]) }, : downstairs
44889 Sep 22 23:22:22.591 INFO [0] client ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }
44890 Sep 22 23:22:22.591 INFO [1] received reconcile message
44891 Sep 22 23:22:22.591 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44892 Sep 22 23:22:22.591 INFO [1] client ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }
44893 Sep 22 23:22:22.591 INFO [2] received reconcile message
44894 Sep 22 23:22:22.591 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44895 Sep 22 23:22:22.591 INFO [2] client ExtentClose { repair_id: ReconciliationId(433), extent_id: 130 }
44896 Sep 22 23:22:22.591 DEBG 433 Close extent 130
44897 Sep 22 23:22:22.591 DEBG 433 Close extent 130
44898 Sep 22 23:22:22.592 DEBG 433 Close extent 130
44899 Sep 22 23:22:22.592 DEBG [2] It's time to notify for 433
44900 Sep 22 23:22:22.592 INFO Completion from [2] id:433 status:true
44901 Sep 22 23:22:22.592 INFO [434/752] Repair commands completed
44902 Sep 22 23:22:22.592 INFO Pop front: ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44903 Sep 22 23:22:22.592 INFO Sent repair work, now wait for resp
44904 Sep 22 23:22:22.592 INFO [0] received reconcile message
44905 Sep 22 23:22:22.592 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44906 Sep 22 23:22:22.592 INFO [0] client ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44907 Sep 22 23:22:22.592 INFO [0] Sending repair request ReconciliationId(434)
44908 Sep 22 23:22:22.592 INFO [1] received reconcile message
44909 Sep 22 23:22:22.592 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44910 Sep 22 23:22:22.592 INFO [1] client ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44911 Sep 22 23:22:22.592 INFO [1] No action required ReconciliationId(434)
44912 Sep 22 23:22:22.592 INFO [2] received reconcile message
44913 Sep 22 23:22:22.593 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44914 Sep 22 23:22:22.593 INFO [2] client ExtentRepair { repair_id: ReconciliationId(434), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
44915 Sep 22 23:22:22.593 INFO [2] No action required ReconciliationId(434)
44916 Sep 22 23:22:22.593 DEBG 434 Repair extent 130 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
44917 Sep 22 23:22:22.593 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/082.copy"
44918 Sep 22 23:22:22.608 DEBG up_ds_listen checked 1 jobs, back to waiting
44919 Sep 22 23:22:22.610 INFO [lossy] skipping 1084
44920 Sep 22 23:22:22.610 WARN returning error on flush!
44921 Sep 22 23:22:22.610 DEBG Flush :1084 extent_limit None deps:[JobId(1083), JobId(1082)] res:false f:31 g:1
44922 Sep 22 23:22:22.610 DEBG Flush :1084 extent_limit None deps:[JobId(1083), JobId(1082)] res:true f:31 g:1
44923 Sep 22 23:22:22.610 INFO [lossy] sleeping 1 second
44924 Sep 22 23:22:22.610 DEBG IO Flush 1086 has deps [JobId(1085), JobId(1084)]
44925 Sep 22 23:22:22.611 WARN returning error on read!
44926 Sep 22 23:22:22.611 DEBG Read :1085 deps:[JobId(1084)] res:false
44927 Sep 22 23:22:22.616 DEBG Read :1085 deps:[JobId(1084)] res:true
44928 Sep 22 23:22:22.655 INFO accepted connection, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44929 Sep 22 23:22:22.655 TRCE incoming request, uri: /extent/130/files, method: GET, req_id: 5f59ca70-05a0-4cbb-9a3b-f15103f9d530, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44930 Sep 22 23:22:22.655 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/130/files, method: GET, req_id: 5f59ca70-05a0-4cbb-9a3b-f15103f9d530, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44931 Sep 22 23:22:22.656 INFO eid:130 Found repair files: ["082", "082.db"]
44932 Sep 22 23:22:22.656 TRCE incoming request, uri: /newextent/130/data, method: GET, req_id: 394cb65e-e60c-4787-88aa-76b22081db71, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44933 Sep 22 23:22:22.656 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/130/data, method: GET, req_id: 394cb65e-e60c-4787-88aa-76b22081db71, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44934 Sep 22 23:22:22.661 TRCE incoming request, uri: /newextent/130/db, method: GET, req_id: 5585788a-dd13-4f6a-9cfb-eebd32258be1, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44935 Sep 22 23:22:22.661 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/130/db, method: GET, req_id: 5585788a-dd13-4f6a-9cfb-eebd32258be1, remote_addr: 127.0.0.1:51399, local_addr: 127.0.0.1:52864, task: repair
44936 Sep 22 23:22:22.662 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/082.copy" to "/tmp/downstairs-zrMnlo6G/00/000/082.replace"
44937 Sep 22 23:22:22.662 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44938 Sep 22 23:22:22.663 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/082.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
44939 Sep 22 23:22:22.663 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/082"
44940 Sep 22 23:22:22.663 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/082.db"
44941 Sep 22 23:22:22.663 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44942 Sep 22 23:22:22.663 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/082.replace" to "/tmp/downstairs-zrMnlo6G/00/000/082.completed"
44943 Sep 22 23:22:22.664 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44944 Sep 22 23:22:22.664 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
44945 Sep 22 23:22:22.664 DEBG [0] It's time to notify for 434
44946 Sep 22 23:22:22.664 INFO Completion from [0] id:434 status:true
44947 Sep 22 23:22:22.664 INFO [435/752] Repair commands completed
44948 Sep 22 23:22:22.664 INFO Pop front: ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }, state: ClientData([New, New, New]) }
44949 Sep 22 23:22:22.664 INFO Sent repair work, now wait for resp
44950 Sep 22 23:22:22.664 INFO [0] received reconcile message
44951 Sep 22 23:22:22.664 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }, state: ClientData([InProgress, New, New]) }, : downstairs
44952 Sep 22 23:22:22.664 INFO [0] client ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }
44953 Sep 22 23:22:22.664 INFO [1] received reconcile message
44954 Sep 22 23:22:22.664 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44955 Sep 22 23:22:22.664 INFO [1] client ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }
44956 Sep 22 23:22:22.664 INFO [2] received reconcile message
44957 Sep 22 23:22:22.664 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44958 Sep 22 23:22:22.664 INFO [2] client ExtentReopen { repair_id: ReconciliationId(435), extent_id: 130 }
44959 Sep 22 23:22:22.664 DEBG 435 Reopen extent 130
44960 Sep 22 23:22:22.665 DEBG 435 Reopen extent 130
44961 Sep 22 23:22:22.666 DEBG 435 Reopen extent 130
44962 Sep 22 23:22:22.666 DEBG [2] It's time to notify for 435
44963 Sep 22 23:22:22.666 INFO Completion from [2] id:435 status:true
44964 Sep 22 23:22:22.666 INFO [436/752] Repair commands completed
44965 Sep 22 23:22:22.666 INFO Pop front: ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44966 Sep 22 23:22:22.666 INFO Sent repair work, now wait for resp
44967 Sep 22 23:22:22.666 INFO [0] received reconcile message
44968 Sep 22 23:22:22.666 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44969 Sep 22 23:22:22.666 INFO [0] client ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44970 Sep 22 23:22:22.666 INFO [1] received reconcile message
44971 Sep 22 23:22:22.666 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44972 Sep 22 23:22:22.666 INFO [1] client ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44973 Sep 22 23:22:22.666 INFO [2] received reconcile message
44974 Sep 22 23:22:22.666 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44975 Sep 22 23:22:22.666 INFO [2] client ExtentFlush { repair_id: ReconciliationId(436), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44976 Sep 22 23:22:22.667 DEBG 436 Flush extent 122 with f:2 g:2
44977 Sep 22 23:22:22.667 DEBG Flush just extent 122 with f:2 and g:2
44978 Sep 22 23:22:22.667 DEBG [1] It's time to notify for 436
44979 Sep 22 23:22:22.667 INFO Completion from [1] id:436 status:true
44980 Sep 22 23:22:22.667 INFO [437/752] Repair commands completed
44981 Sep 22 23:22:22.667 INFO Pop front: ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }, state: ClientData([New, New, New]) }
44982 Sep 22 23:22:22.667 INFO Sent repair work, now wait for resp
44983 Sep 22 23:22:22.667 INFO [0] received reconcile message
44984 Sep 22 23:22:22.667 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }, state: ClientData([InProgress, New, New]) }, : downstairs
44985 Sep 22 23:22:22.667 INFO [0] client ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }
44986 Sep 22 23:22:22.667 INFO [1] received reconcile message
44987 Sep 22 23:22:22.667 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44988 Sep 22 23:22:22.667 INFO [1] client ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }
44989 Sep 22 23:22:22.667 INFO [2] received reconcile message
44990 Sep 22 23:22:22.667 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44991 Sep 22 23:22:22.667 INFO [2] client ExtentClose { repair_id: ReconciliationId(437), extent_id: 122 }
44992 Sep 22 23:22:22.667 DEBG 437 Close extent 122
44993 Sep 22 23:22:22.667 DEBG 437 Close extent 122
44994 Sep 22 23:22:22.668 DEBG 437 Close extent 122
44995 Sep 22 23:22:22.668 DEBG [2] It's time to notify for 437
44996 Sep 22 23:22:22.668 INFO Completion from [2] id:437 status:true
44997 Sep 22 23:22:22.668 INFO [438/752] Repair commands completed
44998 Sep 22 23:22:22.668 INFO Pop front: ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44999 Sep 22 23:22:22.668 INFO Sent repair work, now wait for resp
45000 Sep 22 23:22:22.668 INFO [0] received reconcile message
45001 Sep 22 23:22:22.668 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45002 Sep 22 23:22:22.668 INFO [0] client ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45003 Sep 22 23:22:22.668 INFO [0] Sending repair request ReconciliationId(438)
45004 Sep 22 23:22:22.668 INFO [1] received reconcile message
45005 Sep 22 23:22:22.668 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45006 Sep 22 23:22:22.668 INFO [1] client ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45007 Sep 22 23:22:22.668 INFO [1] No action required ReconciliationId(438)
45008 Sep 22 23:22:22.669 INFO [2] received reconcile message
45009 Sep 22 23:22:22.669 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45010 Sep 22 23:22:22.669 INFO [2] client ExtentRepair { repair_id: ReconciliationId(438), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45011 Sep 22 23:22:22.669 INFO [2] No action required ReconciliationId(438)
45012 Sep 22 23:22:22.669 DEBG 438 Repair extent 122 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45013 Sep 22 23:22:22.669 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/07A.copy"
45014 Sep 22 23:22:22.686 DEBG IO Read 1087 has deps [JobId(1086)]
45015 Sep 22 23:22:22.686 ERRO [1] job id 1085 saw error GenericError("test error")
45016 Sep 22 23:22:22.731 INFO accepted connection, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45017 Sep 22 23:22:22.731 TRCE incoming request, uri: /extent/122/files, method: GET, req_id: 92ca1dce-fde1-4120-931d-837f7da5c7fe, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45018 Sep 22 23:22:22.731 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/122/files, method: GET, req_id: 92ca1dce-fde1-4120-931d-837f7da5c7fe, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45019 Sep 22 23:22:22.732 INFO eid:122 Found repair files: ["07A", "07A.db"]
45020 Sep 22 23:22:22.732 TRCE incoming request, uri: /newextent/122/data, method: GET, req_id: 8ea21843-fa3d-45c4-bff7-d8881f7b135b, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45021 Sep 22 23:22:22.732 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/122/data, method: GET, req_id: 8ea21843-fa3d-45c4-bff7-d8881f7b135b, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45022 Sep 22 23:22:22.737 TRCE incoming request, uri: /newextent/122/db, method: GET, req_id: b1134cf7-f68a-48b2-aa26-a38a22d21d1d, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45023 Sep 22 23:22:22.737 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/122/db, method: GET, req_id: b1134cf7-f68a-48b2-aa26-a38a22d21d1d, remote_addr: 127.0.0.1:48075, local_addr: 127.0.0.1:52864, task: repair
45024 Sep 22 23:22:22.738 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/07A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/07A.replace"
45025 Sep 22 23:22:22.738 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45026 Sep 22 23:22:22.739 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/07A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45027 Sep 22 23:22:22.739 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07A"
45028 Sep 22 23:22:22.740 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/07A.db"
45029 Sep 22 23:22:22.740 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45030 Sep 22 23:22:22.740 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/07A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/07A.completed"
45031 Sep 22 23:22:22.740 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45032 Sep 22 23:22:22.740 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45033 Sep 22 23:22:22.740 DEBG [0] It's time to notify for 438
45034 Sep 22 23:22:22.740 INFO Completion from [0] id:438 status:true
45035 Sep 22 23:22:22.740 INFO [439/752] Repair commands completed
45036 Sep 22 23:22:22.740 INFO Pop front: ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }, state: ClientData([New, New, New]) }
45037 Sep 22 23:22:22.740 INFO Sent repair work, now wait for resp
45038 Sep 22 23:22:22.740 INFO [0] received reconcile message
45039 Sep 22 23:22:22.740 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }, state: ClientData([InProgress, New, New]) }, : downstairs
45040 Sep 22 23:22:22.740 INFO [0] client ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }
45041 Sep 22 23:22:22.740 INFO [1] received reconcile message
45042 Sep 22 23:22:22.740 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45043 Sep 22 23:22:22.740 INFO [1] client ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }
45044 Sep 22 23:22:22.740 INFO [2] received reconcile message
45045 Sep 22 23:22:22.740 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45046 Sep 22 23:22:22.740 INFO [2] client ExtentReopen { repair_id: ReconciliationId(439), extent_id: 122 }
45047 Sep 22 23:22:22.740 DEBG 439 Reopen extent 122
45048 Sep 22 23:22:22.741 DEBG 439 Reopen extent 122
45049 Sep 22 23:22:22.742 DEBG 439 Reopen extent 122
45050 Sep 22 23:22:22.742 DEBG [2] It's time to notify for 439
45051 Sep 22 23:22:22.742 INFO Completion from [2] id:439 status:true
45052 Sep 22 23:22:22.742 INFO [440/752] Repair commands completed
45053 Sep 22 23:22:22.742 INFO Pop front: ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45054 Sep 22 23:22:22.742 INFO Sent repair work, now wait for resp
45055 Sep 22 23:22:22.742 INFO [0] received reconcile message
45056 Sep 22 23:22:22.742 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45057 Sep 22 23:22:22.742 INFO [0] client ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45058 Sep 22 23:22:22.742 INFO [1] received reconcile message
45059 Sep 22 23:22:22.742 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45060 Sep 22 23:22:22.742 INFO [1] client ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45061 Sep 22 23:22:22.742 INFO [2] received reconcile message
45062 Sep 22 23:22:22.742 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45063 Sep 22 23:22:22.742 INFO [2] client ExtentFlush { repair_id: ReconciliationId(440), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45064 Sep 22 23:22:22.743 DEBG 440 Flush extent 105 with f:2 g:2
45065 Sep 22 23:22:22.743 DEBG Flush just extent 105 with f:2 and g:2
45066 Sep 22 23:22:22.743 DEBG [1] It's time to notify for 440
45067 Sep 22 23:22:22.743 INFO Completion from [1] id:440 status:true
45068 Sep 22 23:22:22.743 INFO [441/752] Repair commands completed
45069 Sep 22 23:22:22.743 INFO Pop front: ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }, state: ClientData([New, New, New]) }
45070 Sep 22 23:22:22.743 INFO Sent repair work, now wait for resp
45071 Sep 22 23:22:22.743 INFO [0] received reconcile message
45072 Sep 22 23:22:22.743 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }, state: ClientData([InProgress, New, New]) }, : downstairs
45073 Sep 22 23:22:22.743 INFO [0] client ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }
45074 Sep 22 23:22:22.743 INFO [1] received reconcile message
45075 Sep 22 23:22:22.743 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45076 Sep 22 23:22:22.743 INFO [1] client ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }
45077 Sep 22 23:22:22.743 INFO [2] received reconcile message
45078 Sep 22 23:22:22.743 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45079 Sep 22 23:22:22.743 INFO [2] client ExtentClose { repair_id: ReconciliationId(441), extent_id: 105 }
45080 Sep 22 23:22:22.743 DEBG 441 Close extent 105
45081 Sep 22 23:22:22.743 DEBG 441 Close extent 105
45082 Sep 22 23:22:22.744 DEBG 441 Close extent 105
45083 Sep 22 23:22:22.744 DEBG [2] It's time to notify for 441
45084 Sep 22 23:22:22.744 INFO Completion from [2] id:441 status:true
45085 Sep 22 23:22:22.744 INFO [442/752] Repair commands completed
45086 Sep 22 23:22:22.744 INFO Pop front: ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45087 Sep 22 23:22:22.744 INFO Sent repair work, now wait for resp
45088 Sep 22 23:22:22.744 INFO [0] received reconcile message
45089 Sep 22 23:22:22.744 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45090 Sep 22 23:22:22.744 INFO [0] client ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45091 Sep 22 23:22:22.744 INFO [0] Sending repair request ReconciliationId(442)
45092 Sep 22 23:22:22.744 INFO [1] received reconcile message
45093 Sep 22 23:22:22.744 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45094 Sep 22 23:22:22.744 INFO [1] client ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45095 Sep 22 23:22:22.744 INFO [1] No action required ReconciliationId(442)
45096 Sep 22 23:22:22.745 INFO [2] received reconcile message
45097 Sep 22 23:22:22.745 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45098 Sep 22 23:22:22.745 INFO [2] client ExtentRepair { repair_id: ReconciliationId(442), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45099 Sep 22 23:22:22.745 INFO [2] No action required ReconciliationId(442)
45100 Sep 22 23:22:22.745 DEBG 442 Repair extent 105 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45101 Sep 22 23:22:22.745 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/069.copy"
45102 Sep 22 23:22:22.810 INFO accepted connection, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45103 Sep 22 23:22:22.810 TRCE incoming request, uri: /extent/105/files, method: GET, req_id: 15ff1093-a55b-470a-bfda-1120ec4c7ac2, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45104 Sep 22 23:22:22.811 INFO request completed, latency_us: 204, response_code: 200, uri: /extent/105/files, method: GET, req_id: 15ff1093-a55b-470a-bfda-1120ec4c7ac2, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45105 Sep 22 23:22:22.811 INFO eid:105 Found repair files: ["069", "069.db"]
45106 Sep 22 23:22:22.811 TRCE incoming request, uri: /newextent/105/data, method: GET, req_id: dee41f68-0c16-4c5b-8af8-38c73a0ff2bf, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45107 Sep 22 23:22:22.811 INFO request completed, latency_us: 328, response_code: 200, uri: /newextent/105/data, method: GET, req_id: dee41f68-0c16-4c5b-8af8-38c73a0ff2bf, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45108 Sep 22 23:22:22.816 TRCE incoming request, uri: /newextent/105/db, method: GET, req_id: 4920d333-733a-4a77-ae57-7c1d85db3686, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45109 Sep 22 23:22:22.816 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/105/db, method: GET, req_id: 4920d333-733a-4a77-ae57-7c1d85db3686, remote_addr: 127.0.0.1:36700, local_addr: 127.0.0.1:52864, task: repair
45110 Sep 22 23:22:22.818 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/069.copy" to "/tmp/downstairs-zrMnlo6G/00/000/069.replace"
45111 Sep 22 23:22:22.818 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45112 Sep 22 23:22:22.818 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/069.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45113 Sep 22 23:22:22.819 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/069"
45114 Sep 22 23:22:22.819 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/069.db"
45115 Sep 22 23:22:22.819 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45116 Sep 22 23:22:22.819 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/069.replace" to "/tmp/downstairs-zrMnlo6G/00/000/069.completed"
45117 Sep 22 23:22:22.819 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45118 Sep 22 23:22:22.819 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45119 Sep 22 23:22:22.819 DEBG [0] It's time to notify for 442
45120 Sep 22 23:22:22.819 INFO Completion from [0] id:442 status:true
45121 Sep 22 23:22:22.819 INFO [443/752] Repair commands completed
45122 Sep 22 23:22:22.819 INFO Pop front: ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }, state: ClientData([New, New, New]) }
45123 Sep 22 23:22:22.819 INFO Sent repair work, now wait for resp
45124 Sep 22 23:22:22.819 INFO [0] received reconcile message
45125 Sep 22 23:22:22.819 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }, state: ClientData([InProgress, New, New]) }, : downstairs
45126 Sep 22 23:22:22.819 INFO [0] client ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }
45127 Sep 22 23:22:22.819 INFO [1] received reconcile message
45128 Sep 22 23:22:22.819 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45129 Sep 22 23:22:22.819 INFO [1] client ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }
45130 Sep 22 23:22:22.819 INFO [2] received reconcile message
45131 Sep 22 23:22:22.819 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45132 Sep 22 23:22:22.819 INFO [2] client ExtentReopen { repair_id: ReconciliationId(443), extent_id: 105 }
45133 Sep 22 23:22:22.820 DEBG 443 Reopen extent 105
45134 Sep 22 23:22:22.820 DEBG 443 Reopen extent 105
45135 Sep 22 23:22:22.821 DEBG 443 Reopen extent 105
45136 Sep 22 23:22:22.821 DEBG [2] It's time to notify for 443
45137 Sep 22 23:22:22.821 INFO Completion from [2] id:443 status:true
45138 Sep 22 23:22:22.821 INFO [444/752] Repair commands completed
45139 Sep 22 23:22:22.821 INFO Pop front: ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45140 Sep 22 23:22:22.821 INFO Sent repair work, now wait for resp
45141 Sep 22 23:22:22.821 INFO [0] received reconcile message
45142 Sep 22 23:22:22.821 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45143 Sep 22 23:22:22.822 INFO [0] client ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45144 Sep 22 23:22:22.822 INFO [1] received reconcile message
45145 Sep 22 23:22:22.822 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45146 Sep 22 23:22:22.822 INFO [1] client ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45147 Sep 22 23:22:22.822 INFO [2] received reconcile message
45148 Sep 22 23:22:22.822 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45149 Sep 22 23:22:22.822 INFO [2] client ExtentFlush { repair_id: ReconciliationId(444), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45150 Sep 22 23:22:22.822 DEBG 444 Flush extent 137 with f:2 g:2
45151 Sep 22 23:22:22.822 DEBG Flush just extent 137 with f:2 and g:2
45152 Sep 22 23:22:22.822 DEBG [1] It's time to notify for 444
45153 Sep 22 23:22:22.822 INFO Completion from [1] id:444 status:true
45154 Sep 22 23:22:22.822 INFO [445/752] Repair commands completed
45155 Sep 22 23:22:22.822 INFO Pop front: ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }, state: ClientData([New, New, New]) }
45156 Sep 22 23:22:22.822 INFO Sent repair work, now wait for resp
45157 Sep 22 23:22:22.822 INFO [0] received reconcile message
45158 Sep 22 23:22:22.822 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }, state: ClientData([InProgress, New, New]) }, : downstairs
45159 Sep 22 23:22:22.822 INFO [0] client ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }
45160 Sep 22 23:22:22.822 INFO [1] received reconcile message
45161 Sep 22 23:22:22.822 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45162 Sep 22 23:22:22.822 INFO [1] client ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }
45163 Sep 22 23:22:22.822 INFO [2] received reconcile message
45164 Sep 22 23:22:22.822 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45165 Sep 22 23:22:22.822 INFO [2] client ExtentClose { repair_id: ReconciliationId(445), extent_id: 137 }
45166 Sep 22 23:22:22.822 DEBG 445 Close extent 137
45167 Sep 22 23:22:22.823 DEBG 445 Close extent 137
45168 Sep 22 23:22:22.823 DEBG 445 Close extent 137
45169 Sep 22 23:22:22.823 DEBG [2] It's time to notify for 445
45170 Sep 22 23:22:22.823 INFO Completion from [2] id:445 status:true
45171 Sep 22 23:22:22.823 INFO [446/752] Repair commands completed
45172 Sep 22 23:22:22.823 INFO Pop front: ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45173 Sep 22 23:22:22.823 INFO Sent repair work, now wait for resp
45174 Sep 22 23:22:22.823 INFO [0] received reconcile message
45175 Sep 22 23:22:22.824 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45176 Sep 22 23:22:22.824 INFO [0] client ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45177 Sep 22 23:22:22.824 INFO [0] Sending repair request ReconciliationId(446)
45178 Sep 22 23:22:22.824 INFO [1] received reconcile message
45179 Sep 22 23:22:22.824 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45180 Sep 22 23:22:22.824 INFO [1] client ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45181 Sep 22 23:22:22.824 INFO [1] No action required ReconciliationId(446)
45182 Sep 22 23:22:22.824 INFO [2] received reconcile message
45183 Sep 22 23:22:22.824 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45184 Sep 22 23:22:22.824 INFO [2] client ExtentRepair { repair_id: ReconciliationId(446), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45185 Sep 22 23:22:22.824 INFO [2] No action required ReconciliationId(446)
45186 Sep 22 23:22:22.824 DEBG 446 Repair extent 137 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45187 Sep 22 23:22:22.824 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/089.copy"
45188 Sep 22 23:22:22.886 INFO accepted connection, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45189 Sep 22 23:22:22.886 TRCE incoming request, uri: /extent/137/files, method: GET, req_id: 5ff75bb0-2da9-4dd4-ab22-671ed4cc4804, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45190 Sep 22 23:22:22.886 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/137/files, method: GET, req_id: 5ff75bb0-2da9-4dd4-ab22-671ed4cc4804, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45191 Sep 22 23:22:22.887 INFO eid:137 Found repair files: ["089", "089.db"]
45192 Sep 22 23:22:22.887 TRCE incoming request, uri: /newextent/137/data, method: GET, req_id: 5f9407db-c4d9-4a96-b20c-d1b9f022cc0a, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45193 Sep 22 23:22:22.887 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/137/data, method: GET, req_id: 5f9407db-c4d9-4a96-b20c-d1b9f022cc0a, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45194 Sep 22 23:22:22.892 TRCE incoming request, uri: /newextent/137/db, method: GET, req_id: 7e2063e9-d11e-47db-9a7e-7582000e706b, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45195 Sep 22 23:22:22.892 INFO request completed, latency_us: 305, response_code: 200, uri: /newextent/137/db, method: GET, req_id: 7e2063e9-d11e-47db-9a7e-7582000e706b, remote_addr: 127.0.0.1:44079, local_addr: 127.0.0.1:52864, task: repair
45196 Sep 22 23:22:22.893 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/089.copy" to "/tmp/downstairs-zrMnlo6G/00/000/089.replace"
45197 Sep 22 23:22:22.893 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45198 Sep 22 23:22:22.894 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/089.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45199 Sep 22 23:22:22.894 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/089"
45200 Sep 22 23:22:22.895 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/089.db"
45201 Sep 22 23:22:22.895 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45202 Sep 22 23:22:22.895 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/089.replace" to "/tmp/downstairs-zrMnlo6G/00/000/089.completed"
45203 Sep 22 23:22:22.895 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45204 Sep 22 23:22:22.895 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45205 Sep 22 23:22:22.895 DEBG [0] It's time to notify for 446
45206 Sep 22 23:22:22.895 INFO Completion from [0] id:446 status:true
45207 Sep 22 23:22:22.895 INFO [447/752] Repair commands completed
45208 Sep 22 23:22:22.895 INFO Pop front: ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }, state: ClientData([New, New, New]) }
45209 Sep 22 23:22:22.895 INFO Sent repair work, now wait for resp
45210 Sep 22 23:22:22.895 INFO [0] received reconcile message
45211 Sep 22 23:22:22.895 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }, state: ClientData([InProgress, New, New]) }, : downstairs
45212 Sep 22 23:22:22.895 INFO [0] client ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }
45213 Sep 22 23:22:22.895 INFO [1] received reconcile message
45214 Sep 22 23:22:22.895 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45215 Sep 22 23:22:22.895 INFO [1] client ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }
45216 Sep 22 23:22:22.895 INFO [2] received reconcile message
45217 Sep 22 23:22:22.895 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45218 Sep 22 23:22:22.895 INFO [2] client ExtentReopen { repair_id: ReconciliationId(447), extent_id: 137 }
45219 Sep 22 23:22:22.895 DEBG 447 Reopen extent 137
45220 Sep 22 23:22:22.896 DEBG 447 Reopen extent 137
45221 Sep 22 23:22:22.896 DEBG 447 Reopen extent 137
45222 Sep 22 23:22:22.897 DEBG [2] It's time to notify for 447
45223 Sep 22 23:22:22.897 INFO Completion from [2] id:447 status:true
45224 Sep 22 23:22:22.897 INFO [448/752] Repair commands completed
45225 Sep 22 23:22:22.897 INFO Pop front: ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45226 Sep 22 23:22:22.897 INFO Sent repair work, now wait for resp
45227 Sep 22 23:22:22.897 INFO [0] received reconcile message
45228 Sep 22 23:22:22.897 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45229 Sep 22 23:22:22.897 INFO [0] client ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45230 Sep 22 23:22:22.897 INFO [1] received reconcile message
45231 Sep 22 23:22:22.897 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45232 Sep 22 23:22:22.897 INFO [1] client ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45233 Sep 22 23:22:22.897 INFO [2] received reconcile message
45234 Sep 22 23:22:22.897 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45235 Sep 22 23:22:22.897 INFO [2] client ExtentFlush { repair_id: ReconciliationId(448), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45236 Sep 22 23:22:22.898 DEBG 448 Flush extent 7 with f:2 g:2
45237 Sep 22 23:22:22.898 DEBG Flush just extent 7 with f:2 and g:2
45238 Sep 22 23:22:22.898 DEBG [1] It's time to notify for 448
45239 Sep 22 23:22:22.898 INFO Completion from [1] id:448 status:true
45240 Sep 22 23:22:22.898 INFO [449/752] Repair commands completed
45241 Sep 22 23:22:22.898 INFO Pop front: ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }, state: ClientData([New, New, New]) }
45242 Sep 22 23:22:22.898 INFO Sent repair work, now wait for resp
45243 Sep 22 23:22:22.898 INFO [0] received reconcile message
45244 Sep 22 23:22:22.898 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }, state: ClientData([InProgress, New, New]) }, : downstairs
45245 Sep 22 23:22:22.898 INFO [0] client ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }
45246 Sep 22 23:22:22.898 INFO [1] received reconcile message
45247 Sep 22 23:22:22.898 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45248 Sep 22 23:22:22.898 INFO [1] client ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }
45249 Sep 22 23:22:22.898 INFO [2] received reconcile message
45250 Sep 22 23:22:22.898 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45251 Sep 22 23:22:22.898 INFO [2] client ExtentClose { repair_id: ReconciliationId(449), extent_id: 7 }
45252 Sep 22 23:22:22.898 DEBG 449 Close extent 7
45253 Sep 22 23:22:22.898 DEBG 449 Close extent 7
45254 Sep 22 23:22:22.899 DEBG 449 Close extent 7
45255 Sep 22 23:22:22.899 DEBG [2] It's time to notify for 449
45256 Sep 22 23:22:22.899 INFO Completion from [2] id:449 status:true
45257 Sep 22 23:22:22.899 INFO [450/752] Repair commands completed
45258 Sep 22 23:22:22.899 INFO Pop front: ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45259 Sep 22 23:22:22.899 INFO Sent repair work, now wait for resp
45260 Sep 22 23:22:22.899 INFO [0] received reconcile message
45261 Sep 22 23:22:22.899 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45262 Sep 22 23:22:22.899 INFO [0] client ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45263 Sep 22 23:22:22.899 INFO [0] Sending repair request ReconciliationId(450)
45264 Sep 22 23:22:22.899 INFO [1] received reconcile message
45265 Sep 22 23:22:22.899 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45266 Sep 22 23:22:22.899 INFO [1] client ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45267 Sep 22 23:22:22.899 INFO [1] No action required ReconciliationId(450)
45268 Sep 22 23:22:22.899 INFO [2] received reconcile message
45269 Sep 22 23:22:22.899 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45270 Sep 22 23:22:22.899 INFO [2] client ExtentRepair { repair_id: ReconciliationId(450), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45271 Sep 22 23:22:22.900 INFO [2] No action required ReconciliationId(450)
45272 Sep 22 23:22:22.900 DEBG 450 Repair extent 7 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45273 Sep 22 23:22:22.900 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/007.copy"
45274 Sep 22 23:22:22.962 INFO accepted connection, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45275 Sep 22 23:22:22.962 TRCE incoming request, uri: /extent/7/files, method: GET, req_id: 81d13e72-f213-451c-990d-4b8e63611f7d, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45276 Sep 22 23:22:22.962 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/7/files, method: GET, req_id: 81d13e72-f213-451c-990d-4b8e63611f7d, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45277 Sep 22 23:22:22.962 INFO eid:7 Found repair files: ["007", "007.db"]
45278 Sep 22 23:22:22.963 TRCE incoming request, uri: /newextent/7/data, method: GET, req_id: 8f4984ae-a373-49dc-9f1f-1847cab5d6bc, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45279 Sep 22 23:22:22.963 INFO request completed, latency_us: 259, response_code: 200, uri: /newextent/7/data, method: GET, req_id: 8f4984ae-a373-49dc-9f1f-1847cab5d6bc, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45280 Sep 22 23:22:22.968 TRCE incoming request, uri: /newextent/7/db, method: GET, req_id: b85a5600-8937-458d-abef-5654c0823fbd, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45281 Sep 22 23:22:22.968 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/7/db, method: GET, req_id: b85a5600-8937-458d-abef-5654c0823fbd, remote_addr: 127.0.0.1:59962, local_addr: 127.0.0.1:52864, task: repair
45282 Sep 22 23:22:22.969 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/007.copy" to "/tmp/downstairs-zrMnlo6G/00/000/007.replace"
45283 Sep 22 23:22:22.969 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45284 Sep 22 23:22:22.970 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/007.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45285 Sep 22 23:22:22.970 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/007"
45286 Sep 22 23:22:22.970 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/007.db"
45287 Sep 22 23:22:22.970 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45288 Sep 22 23:22:22.970 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/007.replace" to "/tmp/downstairs-zrMnlo6G/00/000/007.completed"
45289 Sep 22 23:22:22.970 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45290 Sep 22 23:22:22.970 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45291 Sep 22 23:22:22.971 DEBG [0] It's time to notify for 450
45292 Sep 22 23:22:22.971 INFO Completion from [0] id:450 status:true
45293 Sep 22 23:22:22.971 INFO [451/752] Repair commands completed
45294 Sep 22 23:22:22.971 INFO Pop front: ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }, state: ClientData([New, New, New]) }
45295 Sep 22 23:22:22.971 INFO Sent repair work, now wait for resp
45296 Sep 22 23:22:22.971 INFO [0] received reconcile message
45297 Sep 22 23:22:22.971 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }, state: ClientData([InProgress, New, New]) }, : downstairs
45298 Sep 22 23:22:22.971 INFO [0] client ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }
45299 Sep 22 23:22:22.971 INFO [1] received reconcile message
45300 Sep 22 23:22:22.971 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45301 Sep 22 23:22:22.971 INFO [1] client ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }
45302 Sep 22 23:22:22.971 INFO [2] received reconcile message
45303 Sep 22 23:22:22.971 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45304 Sep 22 23:22:22.971 INFO [2] client ExtentReopen { repair_id: ReconciliationId(451), extent_id: 7 }
45305 Sep 22 23:22:22.971 DEBG 451 Reopen extent 7
45306 Sep 22 23:22:22.972 DEBG 451 Reopen extent 7
45307 Sep 22 23:22:22.972 DEBG 451 Reopen extent 7
45308 Sep 22 23:22:22.973 DEBG [2] It's time to notify for 451
45309 Sep 22 23:22:22.973 INFO Completion from [2] id:451 status:true
45310 Sep 22 23:22:22.973 INFO [452/752] Repair commands completed
45311 Sep 22 23:22:22.973 INFO Pop front: ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45312 Sep 22 23:22:22.973 INFO Sent repair work, now wait for resp
45313 Sep 22 23:22:22.973 INFO [0] received reconcile message
45314 Sep 22 23:22:22.973 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45315 Sep 22 23:22:22.973 INFO [0] client ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45316 Sep 22 23:22:22.973 INFO [1] received reconcile message
45317 Sep 22 23:22:22.973 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45318 Sep 22 23:22:22.973 INFO [1] client ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45319 Sep 22 23:22:22.973 INFO [2] received reconcile message
45320 Sep 22 23:22:22.973 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45321 Sep 22 23:22:22.973 INFO [2] client ExtentFlush { repair_id: ReconciliationId(452), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45322 Sep 22 23:22:22.973 DEBG 452 Flush extent 63 with f:2 g:2
45323 Sep 22 23:22:22.973 DEBG Flush just extent 63 with f:2 and g:2
45324 Sep 22 23:22:22.974 DEBG [1] It's time to notify for 452
45325 Sep 22 23:22:22.974 INFO Completion from [1] id:452 status:true
45326 Sep 22 23:22:22.974 INFO [453/752] Repair commands completed
45327 Sep 22 23:22:22.974 INFO Pop front: ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }, state: ClientData([New, New, New]) }
45328 Sep 22 23:22:22.974 INFO Sent repair work, now wait for resp
45329 Sep 22 23:22:22.974 INFO [0] received reconcile message
45330 Sep 22 23:22:22.974 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }, state: ClientData([InProgress, New, New]) }, : downstairs
45331 Sep 22 23:22:22.974 INFO [0] client ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }
45332 Sep 22 23:22:22.974 INFO [1] received reconcile message
45333 Sep 22 23:22:22.974 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45334 Sep 22 23:22:22.974 INFO [1] client ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }
45335 Sep 22 23:22:22.974 INFO [2] received reconcile message
45336 Sep 22 23:22:22.974 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45337 Sep 22 23:22:22.974 INFO [2] client ExtentClose { repair_id: ReconciliationId(453), extent_id: 63 }
45338 Sep 22 23:22:22.974 DEBG 453 Close extent 63
45339 Sep 22 23:22:22.974 DEBG 453 Close extent 63
45340 Sep 22 23:22:22.975 DEBG 453 Close extent 63
45341 Sep 22 23:22:22.975 DEBG [2] It's time to notify for 453
45342 Sep 22 23:22:22.975 INFO Completion from [2] id:453 status:true
45343 Sep 22 23:22:22.975 INFO [454/752] Repair commands completed
45344 Sep 22 23:22:22.975 INFO Pop front: ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45345 Sep 22 23:22:22.975 INFO Sent repair work, now wait for resp
45346 Sep 22 23:22:22.975 INFO [0] received reconcile message
45347 Sep 22 23:22:22.975 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45348 Sep 22 23:22:22.975 INFO [0] client ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45349 Sep 22 23:22:22.975 INFO [0] Sending repair request ReconciliationId(454)
45350 Sep 22 23:22:22.975 INFO [1] received reconcile message
45351 Sep 22 23:22:22.975 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45352 Sep 22 23:22:22.975 INFO [1] client ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45353 Sep 22 23:22:22.975 INFO [1] No action required ReconciliationId(454)
45354 Sep 22 23:22:22.975 INFO [2] received reconcile message
45355 Sep 22 23:22:22.975 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45356 Sep 22 23:22:22.975 INFO [2] client ExtentRepair { repair_id: ReconciliationId(454), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45357 Sep 22 23:22:22.975 INFO [2] No action required ReconciliationId(454)
45358 Sep 22 23:22:22.975 DEBG 454 Repair extent 63 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45359 Sep 22 23:22:22.975 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/03F.copy"
45360 Sep 22 23:22:23.019 ERRO [2] job id 1084 saw error GenericError("test error")
45361 Sep 22 23:22:23.019 DEBG [rc] retire 1084 clears [JobId(1083), JobId(1084)], : downstairs
45362 Sep 22 23:22:23.034 WARN returning error on flush!
45363 Sep 22 23:22:23.034 DEBG Flush :1086 extent_limit None deps:[JobId(1085), JobId(1084)] res:false f:32 g:1
45364 Sep 22 23:22:23.034 INFO [lossy] skipping 1087
45365 Sep 22 23:22:23.034 DEBG Flush :1086 extent_limit None deps:[JobId(1085), JobId(1084)] res:true f:32 g:1
45366 Sep 22 23:22:23.040 DEBG Read :1087 deps:[JobId(1086)] res:true
45367 Sep 22 23:22:23.042 INFO accepted connection, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45368 Sep 22 23:22:23.042 TRCE incoming request, uri: /extent/63/files, method: GET, req_id: 371d99ed-57ad-4b29-a15e-3fc077feb00d, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45369 Sep 22 23:22:23.042 INFO request completed, latency_us: 267, response_code: 200, uri: /extent/63/files, method: GET, req_id: 371d99ed-57ad-4b29-a15e-3fc077feb00d, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45370 Sep 22 23:22:23.043 INFO eid:63 Found repair files: ["03F", "03F.db"]
45371 Sep 22 23:22:23.043 TRCE incoming request, uri: /newextent/63/data, method: GET, req_id: 2efc4e3f-0f46-4d13-bd24-cbae4417ec1b, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45372 Sep 22 23:22:23.043 INFO request completed, latency_us: 370, response_code: 200, uri: /newextent/63/data, method: GET, req_id: 2efc4e3f-0f46-4d13-bd24-cbae4417ec1b, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45373 Sep 22 23:22:23.048 TRCE incoming request, uri: /newextent/63/db, method: GET, req_id: 28e2ab0b-3c3f-4120-b526-816e5c996973, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45374 Sep 22 23:22:23.048 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/63/db, method: GET, req_id: 28e2ab0b-3c3f-4120-b526-816e5c996973, remote_addr: 127.0.0.1:40604, local_addr: 127.0.0.1:52864, task: repair
45375 Sep 22 23:22:23.050 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/03F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/03F.replace"
45376 Sep 22 23:22:23.050 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45377 Sep 22 23:22:23.051 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/03F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45378 Sep 22 23:22:23.051 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03F"
45379 Sep 22 23:22:23.051 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03F.db"
45380 Sep 22 23:22:23.051 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45381 Sep 22 23:22:23.051 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/03F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/03F.completed"
45382 Sep 22 23:22:23.051 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45383 Sep 22 23:22:23.051 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45384 Sep 22 23:22:23.051 DEBG [0] It's time to notify for 454
45385 Sep 22 23:22:23.052 INFO Completion from [0] id:454 status:true
45386 Sep 22 23:22:23.052 INFO [455/752] Repair commands completed
45387 Sep 22 23:22:23.052 INFO Pop front: ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }, state: ClientData([New, New, New]) }
45388 Sep 22 23:22:23.052 INFO Sent repair work, now wait for resp
45389 Sep 22 23:22:23.052 INFO [0] received reconcile message
45390 Sep 22 23:22:23.052 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }, state: ClientData([InProgress, New, New]) }, : downstairs
45391 Sep 22 23:22:23.052 INFO [0] client ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }
45392 Sep 22 23:22:23.052 INFO [1] received reconcile message
45393 Sep 22 23:22:23.052 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45394 Sep 22 23:22:23.052 INFO [1] client ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }
45395 Sep 22 23:22:23.052 INFO [2] received reconcile message
45396 Sep 22 23:22:23.052 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45397 Sep 22 23:22:23.052 INFO [2] client ExtentReopen { repair_id: ReconciliationId(455), extent_id: 63 }
45398 Sep 22 23:22:23.052 DEBG 455 Reopen extent 63
45399 Sep 22 23:22:23.053 DEBG 455 Reopen extent 63
45400 Sep 22 23:22:23.053 DEBG 455 Reopen extent 63
45401 Sep 22 23:22:23.054 DEBG [2] It's time to notify for 455
45402 Sep 22 23:22:23.054 INFO Completion from [2] id:455 status:true
45403 Sep 22 23:22:23.054 INFO [456/752] Repair commands completed
45404 Sep 22 23:22:23.054 INFO Pop front: ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45405 Sep 22 23:22:23.054 INFO Sent repair work, now wait for resp
45406 Sep 22 23:22:23.054 INFO [0] received reconcile message
45407 Sep 22 23:22:23.054 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45408 Sep 22 23:22:23.054 INFO [0] client ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45409 Sep 22 23:22:23.054 INFO [1] received reconcile message
45410 Sep 22 23:22:23.054 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45411 Sep 22 23:22:23.054 INFO [1] client ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45412 Sep 22 23:22:23.054 INFO [2] received reconcile message
45413 Sep 22 23:22:23.054 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45414 Sep 22 23:22:23.054 INFO [2] client ExtentFlush { repair_id: ReconciliationId(456), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45415 Sep 22 23:22:23.054 DEBG 456 Flush extent 58 with f:2 g:2
45416 Sep 22 23:22:23.054 DEBG Flush just extent 58 with f:2 and g:2
45417 Sep 22 23:22:23.055 DEBG [1] It's time to notify for 456
45418 Sep 22 23:22:23.055 INFO Completion from [1] id:456 status:true
45419 Sep 22 23:22:23.055 INFO [457/752] Repair commands completed
45420 Sep 22 23:22:23.055 INFO Pop front: ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }, state: ClientData([New, New, New]) }
45421 Sep 22 23:22:23.055 INFO Sent repair work, now wait for resp
45422 Sep 22 23:22:23.055 INFO [0] received reconcile message
45423 Sep 22 23:22:23.055 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }, state: ClientData([InProgress, New, New]) }, : downstairs
45424 Sep 22 23:22:23.055 INFO [0] client ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }
45425 Sep 22 23:22:23.055 INFO [1] received reconcile message
45426 Sep 22 23:22:23.055 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45427 Sep 22 23:22:23.055 INFO [1] client ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }
45428 Sep 22 23:22:23.055 INFO [2] received reconcile message
45429 Sep 22 23:22:23.055 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45430 Sep 22 23:22:23.055 INFO [2] client ExtentClose { repair_id: ReconciliationId(457), extent_id: 58 }
45431 Sep 22 23:22:23.055 DEBG 457 Close extent 58
45432 Sep 22 23:22:23.055 DEBG 457 Close extent 58
45433 Sep 22 23:22:23.056 DEBG 457 Close extent 58
45434 Sep 22 23:22:23.056 DEBG [2] It's time to notify for 457
45435 Sep 22 23:22:23.056 INFO Completion from [2] id:457 status:true
45436 Sep 22 23:22:23.056 INFO [458/752] Repair commands completed
45437 Sep 22 23:22:23.056 INFO Pop front: ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45438 Sep 22 23:22:23.056 INFO Sent repair work, now wait for resp
45439 Sep 22 23:22:23.056 INFO [0] received reconcile message
45440 Sep 22 23:22:23.056 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45441 Sep 22 23:22:23.056 INFO [0] client ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45442 Sep 22 23:22:23.056 INFO [0] Sending repair request ReconciliationId(458)
45443 Sep 22 23:22:23.056 INFO [1] received reconcile message
45444 Sep 22 23:22:23.056 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45445 Sep 22 23:22:23.056 INFO [1] client ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45446 Sep 22 23:22:23.056 INFO [1] No action required ReconciliationId(458)
45447 Sep 22 23:22:23.056 INFO [2] received reconcile message
45448 Sep 22 23:22:23.056 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45449 Sep 22 23:22:23.056 INFO [2] client ExtentRepair { repair_id: ReconciliationId(458), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45450 Sep 22 23:22:23.056 INFO [2] No action required ReconciliationId(458)
45451 Sep 22 23:22:23.057 DEBG 458 Repair extent 58 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45452 Sep 22 23:22:23.057 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/03A.copy"
45453 Sep 22 23:22:23.118 INFO accepted connection, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45454 Sep 22 23:22:23.119 TRCE incoming request, uri: /extent/58/files, method: GET, req_id: fdff48ca-2f8d-4758-bd73-cf22271aa9a5, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45455 Sep 22 23:22:23.119 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/58/files, method: GET, req_id: fdff48ca-2f8d-4758-bd73-cf22271aa9a5, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45456 Sep 22 23:22:23.119 INFO eid:58 Found repair files: ["03A", "03A.db"]
45457 Sep 22 23:22:23.119 TRCE incoming request, uri: /newextent/58/data, method: GET, req_id: 577b5e80-c943-4541-a2ad-66739862b242, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45458 Sep 22 23:22:23.120 INFO request completed, latency_us: 308, response_code: 200, uri: /newextent/58/data, method: GET, req_id: 577b5e80-c943-4541-a2ad-66739862b242, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45459 Sep 22 23:22:23.124 TRCE incoming request, uri: /newextent/58/db, method: GET, req_id: 89bc34f9-1fb7-468b-96f9-4ec832ee626e, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45460 Sep 22 23:22:23.125 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/58/db, method: GET, req_id: 89bc34f9-1fb7-468b-96f9-4ec832ee626e, remote_addr: 127.0.0.1:33238, local_addr: 127.0.0.1:52864, task: repair
45461 Sep 22 23:22:23.126 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/03A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/03A.replace"
45462 Sep 22 23:22:23.126 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45463 Sep 22 23:22:23.127 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/03A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45464 Sep 22 23:22:23.127 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03A"
45465 Sep 22 23:22:23.127 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/03A.db"
45466 Sep 22 23:22:23.127 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45467 Sep 22 23:22:23.127 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/03A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/03A.completed"
45468 Sep 22 23:22:23.127 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45469 Sep 22 23:22:23.127 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45470 Sep 22 23:22:23.127 DEBG [0] It's time to notify for 458
45471 Sep 22 23:22:23.128 INFO Completion from [0] id:458 status:true
45472 Sep 22 23:22:23.128 INFO [459/752] Repair commands completed
45473 Sep 22 23:22:23.128 INFO Pop front: ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }, state: ClientData([New, New, New]) }
45474 Sep 22 23:22:23.128 INFO Sent repair work, now wait for resp
45475 Sep 22 23:22:23.128 INFO [0] received reconcile message
45476 Sep 22 23:22:23.128 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }, state: ClientData([InProgress, New, New]) }, : downstairs
45477 Sep 22 23:22:23.128 INFO [0] client ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }
45478 Sep 22 23:22:23.128 INFO [1] received reconcile message
45479 Sep 22 23:22:23.128 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45480 Sep 22 23:22:23.128 INFO [1] client ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }
45481 Sep 22 23:22:23.128 INFO [2] received reconcile message
45482 Sep 22 23:22:23.128 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45483 Sep 22 23:22:23.128 INFO [2] client ExtentReopen { repair_id: ReconciliationId(459), extent_id: 58 }
45484 Sep 22 23:22:23.128 DEBG 459 Reopen extent 58
45485 Sep 22 23:22:23.129 DEBG 459 Reopen extent 58
45486 Sep 22 23:22:23.129 DEBG 459 Reopen extent 58
45487 Sep 22 23:22:23.130 DEBG [2] It's time to notify for 459
45488 Sep 22 23:22:23.130 INFO Completion from [2] id:459 status:true
45489 Sep 22 23:22:23.130 INFO [460/752] Repair commands completed
45490 Sep 22 23:22:23.130 INFO Pop front: ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45491 Sep 22 23:22:23.130 INFO Sent repair work, now wait for resp
45492 Sep 22 23:22:23.130 INFO [0] received reconcile message
45493 Sep 22 23:22:23.130 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45494 Sep 22 23:22:23.130 INFO [0] client ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45495 Sep 22 23:22:23.130 INFO [1] received reconcile message
45496 Sep 22 23:22:23.130 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45497 Sep 22 23:22:23.130 INFO [1] client ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45498 Sep 22 23:22:23.130 INFO [2] received reconcile message
45499 Sep 22 23:22:23.130 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45500 Sep 22 23:22:23.130 INFO [2] client ExtentFlush { repair_id: ReconciliationId(460), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45501 Sep 22 23:22:23.130 DEBG 460 Flush extent 96 with f:2 g:2
45502 Sep 22 23:22:23.130 DEBG Flush just extent 96 with f:2 and g:2
45503 Sep 22 23:22:23.130 DEBG [1] It's time to notify for 460
45504 Sep 22 23:22:23.130 INFO Completion from [1] id:460 status:true
45505 Sep 22 23:22:23.130 INFO [461/752] Repair commands completed
45506 Sep 22 23:22:23.130 INFO Pop front: ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }, state: ClientData([New, New, New]) }
45507 Sep 22 23:22:23.130 INFO Sent repair work, now wait for resp
45508 Sep 22 23:22:23.130 INFO [0] received reconcile message
45509 Sep 22 23:22:23.130 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }, state: ClientData([InProgress, New, New]) }, : downstairs
45510 Sep 22 23:22:23.130 INFO [0] client ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }
45511 Sep 22 23:22:23.131 INFO [1] received reconcile message
45512 Sep 22 23:22:23.131 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45513 Sep 22 23:22:23.131 INFO [1] client ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }
45514 Sep 22 23:22:23.131 INFO [2] received reconcile message
45515 Sep 22 23:22:23.131 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45516 Sep 22 23:22:23.131 INFO [2] client ExtentClose { repair_id: ReconciliationId(461), extent_id: 96 }
45517 Sep 22 23:22:23.131 DEBG 461 Close extent 96
45518 Sep 22 23:22:23.131 DEBG 461 Close extent 96
45519 Sep 22 23:22:23.131 DEBG 461 Close extent 96
45520 Sep 22 23:22:23.132 DEBG [2] It's time to notify for 461
45521 Sep 22 23:22:23.132 INFO Completion from [2] id:461 status:true
45522 Sep 22 23:22:23.132 INFO [462/752] Repair commands completed
45523 Sep 22 23:22:23.132 INFO Pop front: ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45524 Sep 22 23:22:23.132 INFO Sent repair work, now wait for resp
45525 Sep 22 23:22:23.132 INFO [0] received reconcile message
45526 Sep 22 23:22:23.132 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45527 Sep 22 23:22:23.132 INFO [0] client ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45528 Sep 22 23:22:23.132 INFO [0] Sending repair request ReconciliationId(462)
45529 Sep 22 23:22:23.132 INFO [1] received reconcile message
45530 Sep 22 23:22:23.132 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45531 Sep 22 23:22:23.132 INFO [1] client ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45532 Sep 22 23:22:23.132 INFO [1] No action required ReconciliationId(462)
45533 Sep 22 23:22:23.132 INFO [2] received reconcile message
45534 Sep 22 23:22:23.132 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45535 Sep 22 23:22:23.132 INFO [2] client ExtentRepair { repair_id: ReconciliationId(462), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45536 Sep 22 23:22:23.132 INFO [2] No action required ReconciliationId(462)
45537 Sep 22 23:22:23.132 DEBG 462 Repair extent 96 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45538 Sep 22 23:22:23.132 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/060.copy"
45539 Sep 22 23:22:23.198 INFO accepted connection, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45540 Sep 22 23:22:23.198 TRCE incoming request, uri: /extent/96/files, method: GET, req_id: a4c6c8dc-b70e-4eb6-b050-102bf7b0a29a, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45541 Sep 22 23:22:23.198 INFO request completed, latency_us: 206, response_code: 200, uri: /extent/96/files, method: GET, req_id: a4c6c8dc-b70e-4eb6-b050-102bf7b0a29a, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45542 Sep 22 23:22:23.198 INFO eid:96 Found repair files: ["060", "060.db"]
45543 Sep 22 23:22:23.199 TRCE incoming request, uri: /newextent/96/data, method: GET, req_id: 148cf95a-1c4d-4d50-9aeb-d9a7f0df55cc, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45544 Sep 22 23:22:23.199 INFO request completed, latency_us: 261, response_code: 200, uri: /newextent/96/data, method: GET, req_id: 148cf95a-1c4d-4d50-9aeb-d9a7f0df55cc, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45545 Sep 22 23:22:23.204 TRCE incoming request, uri: /newextent/96/db, method: GET, req_id: 1655e12a-5268-448a-aeae-739023cd5943, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45546 Sep 22 23:22:23.205 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/96/db, method: GET, req_id: 1655e12a-5268-448a-aeae-739023cd5943, remote_addr: 127.0.0.1:54809, local_addr: 127.0.0.1:52864, task: repair
45547 Sep 22 23:22:23.206 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/060.copy" to "/tmp/downstairs-zrMnlo6G/00/000/060.replace"
45548 Sep 22 23:22:23.206 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45549 Sep 22 23:22:23.207 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/060.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45550 Sep 22 23:22:23.207 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/060"
45551 Sep 22 23:22:23.207 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/060.db"
45552 Sep 22 23:22:23.208 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45553 Sep 22 23:22:23.208 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/060.replace" to "/tmp/downstairs-zrMnlo6G/00/000/060.completed"
45554 Sep 22 23:22:23.208 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45555 Sep 22 23:22:23.208 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45556 Sep 22 23:22:23.208 DEBG [0] It's time to notify for 462
45557 Sep 22 23:22:23.208 INFO Completion from [0] id:462 status:true
45558 Sep 22 23:22:23.208 INFO [463/752] Repair commands completed
45559 Sep 22 23:22:23.208 INFO Pop front: ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }, state: ClientData([New, New, New]) }
45560 Sep 22 23:22:23.208 INFO Sent repair work, now wait for resp
45561 Sep 22 23:22:23.208 INFO [0] received reconcile message
45562 Sep 22 23:22:23.208 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }, state: ClientData([InProgress, New, New]) }, : downstairs
45563 Sep 22 23:22:23.208 INFO [0] client ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }
45564 Sep 22 23:22:23.208 INFO [1] received reconcile message
45565 Sep 22 23:22:23.208 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45566 Sep 22 23:22:23.208 INFO [1] client ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }
45567 Sep 22 23:22:23.208 INFO [2] received reconcile message
45568 Sep 22 23:22:23.208 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45569 Sep 22 23:22:23.208 INFO [2] client ExtentReopen { repair_id: ReconciliationId(463), extent_id: 96 }
45570 Sep 22 23:22:23.209 DEBG 463 Reopen extent 96
45571 Sep 22 23:22:23.209 DEBG 463 Reopen extent 96
45572 Sep 22 23:22:23.210 DEBG 463 Reopen extent 96
45573 Sep 22 23:22:23.210 DEBG [2] It's time to notify for 463
45574 Sep 22 23:22:23.211 INFO Completion from [2] id:463 status:true
45575 Sep 22 23:22:23.211 INFO [464/752] Repair commands completed
45576 Sep 22 23:22:23.211 INFO Pop front: ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45577 Sep 22 23:22:23.211 INFO Sent repair work, now wait for resp
45578 Sep 22 23:22:23.211 INFO [0] received reconcile message
45579 Sep 22 23:22:23.211 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45580 Sep 22 23:22:23.211 INFO [0] client ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45581 Sep 22 23:22:23.211 INFO [1] received reconcile message
45582 Sep 22 23:22:23.211 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45583 Sep 22 23:22:23.211 INFO [1] client ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45584 Sep 22 23:22:23.211 INFO [2] received reconcile message
45585 Sep 22 23:22:23.211 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45586 Sep 22 23:22:23.211 INFO [2] client ExtentFlush { repair_id: ReconciliationId(464), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45587 Sep 22 23:22:23.211 DEBG 464 Flush extent 40 with f:2 g:2
45588 Sep 22 23:22:23.211 DEBG Flush just extent 40 with f:2 and g:2
45589 Sep 22 23:22:23.211 DEBG [1] It's time to notify for 464
45590 Sep 22 23:22:23.211 INFO Completion from [1] id:464 status:true
45591 Sep 22 23:22:23.211 INFO [465/752] Repair commands completed
45592 Sep 22 23:22:23.211 INFO Pop front: ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }, state: ClientData([New, New, New]) }
45593 Sep 22 23:22:23.211 INFO Sent repair work, now wait for resp
45594 Sep 22 23:22:23.211 INFO [0] received reconcile message
45595 Sep 22 23:22:23.211 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }, state: ClientData([InProgress, New, New]) }, : downstairs
45596 Sep 22 23:22:23.211 INFO [0] client ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }
45597 Sep 22 23:22:23.211 INFO [1] received reconcile message
45598 Sep 22 23:22:23.211 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45599 Sep 22 23:22:23.211 INFO [1] client ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }
45600 Sep 22 23:22:23.211 INFO [2] received reconcile message
45601 Sep 22 23:22:23.211 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45602 Sep 22 23:22:23.211 INFO [2] client ExtentClose { repair_id: ReconciliationId(465), extent_id: 40 }
45603 Sep 22 23:22:23.212 DEBG 465 Close extent 40
45604 Sep 22 23:22:23.212 DEBG 465 Close extent 40
45605 Sep 22 23:22:23.212 DEBG 465 Close extent 40
45606 Sep 22 23:22:23.212 DEBG [2] It's time to notify for 465
45607 Sep 22 23:22:23.213 INFO Completion from [2] id:465 status:true
45608 Sep 22 23:22:23.213 INFO [466/752] Repair commands completed
45609 Sep 22 23:22:23.213 INFO Pop front: ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45610 Sep 22 23:22:23.213 INFO Sent repair work, now wait for resp
45611 Sep 22 23:22:23.213 INFO [0] received reconcile message
45612 Sep 22 23:22:23.213 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45613 Sep 22 23:22:23.213 INFO [0] client ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45614 Sep 22 23:22:23.213 INFO [0] Sending repair request ReconciliationId(466)
45615 Sep 22 23:22:23.213 INFO [1] received reconcile message
45616 Sep 22 23:22:23.213 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45617 Sep 22 23:22:23.213 INFO [1] client ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45618 Sep 22 23:22:23.213 INFO [1] No action required ReconciliationId(466)
45619 Sep 22 23:22:23.213 INFO [2] received reconcile message
45620 Sep 22 23:22:23.213 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45621 Sep 22 23:22:23.213 INFO [2] client ExtentRepair { repair_id: ReconciliationId(466), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45622 Sep 22 23:22:23.213 INFO [2] No action required ReconciliationId(466)
45623 Sep 22 23:22:23.213 DEBG 466 Repair extent 40 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45624 Sep 22 23:22:23.213 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/028.copy"
45625 Sep 22 23:22:23.277 INFO accepted connection, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45626 Sep 22 23:22:23.277 TRCE incoming request, uri: /extent/40/files, method: GET, req_id: 358fc8b2-0693-49b6-9aff-b0a4aa32da26, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45627 Sep 22 23:22:23.277 INFO request completed, latency_us: 278, response_code: 200, uri: /extent/40/files, method: GET, req_id: 358fc8b2-0693-49b6-9aff-b0a4aa32da26, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45628 Sep 22 23:22:23.278 INFO eid:40 Found repair files: ["028", "028.db"]
45629 Sep 22 23:22:23.278 TRCE incoming request, uri: /newextent/40/data, method: GET, req_id: 0cc30ce9-9b8e-406b-b294-8db10eb7a615, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45630 Sep 22 23:22:23.278 INFO request completed, latency_us: 371, response_code: 200, uri: /newextent/40/data, method: GET, req_id: 0cc30ce9-9b8e-406b-b294-8db10eb7a615, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45631 Sep 22 23:22:23.283 TRCE incoming request, uri: /newextent/40/db, method: GET, req_id: c043eba6-ae14-46b1-a1e8-c0427fd8903f, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45632 Sep 22 23:22:23.283 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/40/db, method: GET, req_id: c043eba6-ae14-46b1-a1e8-c0427fd8903f, remote_addr: 127.0.0.1:49761, local_addr: 127.0.0.1:52864, task: repair
45633 Sep 22 23:22:23.284 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/028.copy" to "/tmp/downstairs-zrMnlo6G/00/000/028.replace"
45634 Sep 22 23:22:23.284 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45635 Sep 22 23:22:23.286 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/028.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45636 Sep 22 23:22:23.286 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/028"
45637 Sep 22 23:22:23.286 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/028.db"
45638 Sep 22 23:22:23.286 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45639 Sep 22 23:22:23.286 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/028.replace" to "/tmp/downstairs-zrMnlo6G/00/000/028.completed"
45640 Sep 22 23:22:23.286 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45641 Sep 22 23:22:23.286 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45642 Sep 22 23:22:23.286 DEBG [0] It's time to notify for 466
45643 Sep 22 23:22:23.286 INFO Completion from [0] id:466 status:true
45644 Sep 22 23:22:23.286 INFO [467/752] Repair commands completed
45645 Sep 22 23:22:23.286 INFO Pop front: ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }, state: ClientData([New, New, New]) }
45646 Sep 22 23:22:23.287 INFO Sent repair work, now wait for resp
45647 Sep 22 23:22:23.287 INFO [0] received reconcile message
45648 Sep 22 23:22:23.287 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }, state: ClientData([InProgress, New, New]) }, : downstairs
45649 Sep 22 23:22:23.287 INFO [0] client ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }
45650 Sep 22 23:22:23.287 INFO [1] received reconcile message
45651 Sep 22 23:22:23.287 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45652 Sep 22 23:22:23.287 INFO [1] client ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }
45653 Sep 22 23:22:23.287 INFO [2] received reconcile message
45654 Sep 22 23:22:23.287 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45655 Sep 22 23:22:23.287 INFO [2] client ExtentReopen { repair_id: ReconciliationId(467), extent_id: 40 }
45656 Sep 22 23:22:23.287 DEBG 467 Reopen extent 40
45657 Sep 22 23:22:23.288 DEBG 467 Reopen extent 40
45658 Sep 22 23:22:23.288 DEBG 467 Reopen extent 40
45659 Sep 22 23:22:23.289 DEBG [2] It's time to notify for 467
45660 Sep 22 23:22:23.289 INFO Completion from [2] id:467 status:true
45661 Sep 22 23:22:23.289 INFO [468/752] Repair commands completed
45662 Sep 22 23:22:23.289 INFO Pop front: ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45663 Sep 22 23:22:23.289 INFO Sent repair work, now wait for resp
45664 Sep 22 23:22:23.289 INFO [0] received reconcile message
45665 Sep 22 23:22:23.289 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45666 Sep 22 23:22:23.289 INFO [0] client ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45667 Sep 22 23:22:23.289 INFO [1] received reconcile message
45668 Sep 22 23:22:23.289 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45669 Sep 22 23:22:23.289 INFO [1] client ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45670 Sep 22 23:22:23.289 INFO [2] received reconcile message
45671 Sep 22 23:22:23.289 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45672 Sep 22 23:22:23.289 INFO [2] client ExtentFlush { repair_id: ReconciliationId(468), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45673 Sep 22 23:22:23.289 DEBG 468 Flush extent 80 with f:2 g:2
45674 Sep 22 23:22:23.289 DEBG Flush just extent 80 with f:2 and g:2
45675 Sep 22 23:22:23.289 DEBG [1] It's time to notify for 468
45676 Sep 22 23:22:23.290 INFO Completion from [1] id:468 status:true
45677 Sep 22 23:22:23.290 INFO [469/752] Repair commands completed
45678 Sep 22 23:22:23.290 INFO Pop front: ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }, state: ClientData([New, New, New]) }
45679 Sep 22 23:22:23.290 INFO Sent repair work, now wait for resp
45680 Sep 22 23:22:23.290 INFO [0] received reconcile message
45681 Sep 22 23:22:23.290 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }, state: ClientData([InProgress, New, New]) }, : downstairs
45682 Sep 22 23:22:23.290 INFO [0] client ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }
45683 Sep 22 23:22:23.290 INFO [1] received reconcile message
45684 Sep 22 23:22:23.290 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45685 Sep 22 23:22:23.290 INFO [1] client ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }
45686 Sep 22 23:22:23.290 INFO [2] received reconcile message
45687 Sep 22 23:22:23.290 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45688 Sep 22 23:22:23.290 INFO [2] client ExtentClose { repair_id: ReconciliationId(469), extent_id: 80 }
45689 Sep 22 23:22:23.290 DEBG 469 Close extent 80
45690 Sep 22 23:22:23.290 DEBG 469 Close extent 80
45691 Sep 22 23:22:23.291 DEBG 469 Close extent 80
45692 Sep 22 23:22:23.291 DEBG [2] It's time to notify for 469
45693 Sep 22 23:22:23.291 INFO Completion from [2] id:469 status:true
45694 Sep 22 23:22:23.291 INFO [470/752] Repair commands completed
45695 Sep 22 23:22:23.291 INFO Pop front: ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45696 Sep 22 23:22:23.291 INFO Sent repair work, now wait for resp
45697 Sep 22 23:22:23.291 INFO [0] received reconcile message
45698 Sep 22 23:22:23.291 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45699 Sep 22 23:22:23.291 INFO [0] client ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45700 Sep 22 23:22:23.291 INFO [0] Sending repair request ReconciliationId(470)
45701 Sep 22 23:22:23.291 INFO [1] received reconcile message
45702 Sep 22 23:22:23.291 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45703 Sep 22 23:22:23.291 INFO [1] client ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45704 Sep 22 23:22:23.291 INFO [1] No action required ReconciliationId(470)
45705 Sep 22 23:22:23.291 INFO [2] received reconcile message
45706 Sep 22 23:22:23.291 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45707 Sep 22 23:22:23.291 INFO [2] client ExtentRepair { repair_id: ReconciliationId(470), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45708 Sep 22 23:22:23.291 INFO [2] No action required ReconciliationId(470)
45709 Sep 22 23:22:23.291 DEBG 470 Repair extent 80 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45710 Sep 22 23:22:23.292 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/050.copy"
45711 Sep 22 23:22:23.357 INFO accepted connection, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45712 Sep 22 23:22:23.357 TRCE incoming request, uri: /extent/80/files, method: GET, req_id: ebfc2499-6f2b-4b77-a197-c6d778e3038a, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45713 Sep 22 23:22:23.358 INFO request completed, latency_us: 227, response_code: 200, uri: /extent/80/files, method: GET, req_id: ebfc2499-6f2b-4b77-a197-c6d778e3038a, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45714 Sep 22 23:22:23.358 INFO eid:80 Found repair files: ["050", "050.db"]
45715 Sep 22 23:22:23.358 TRCE incoming request, uri: /newextent/80/data, method: GET, req_id: 637edeab-0f07-4dc4-82f1-cae16a5a5a4d, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45716 Sep 22 23:22:23.359 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/80/data, method: GET, req_id: 637edeab-0f07-4dc4-82f1-cae16a5a5a4d, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45717 Sep 22 23:22:23.363 TRCE incoming request, uri: /newextent/80/db, method: GET, req_id: 31e856b4-e01b-4b04-9c15-6bc8c746bc20, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45718 Sep 22 23:22:23.364 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/80/db, method: GET, req_id: 31e856b4-e01b-4b04-9c15-6bc8c746bc20, remote_addr: 127.0.0.1:49808, local_addr: 127.0.0.1:52864, task: repair
45719 Sep 22 23:22:23.365 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/050.copy" to "/tmp/downstairs-zrMnlo6G/00/000/050.replace"
45720 Sep 22 23:22:23.365 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45721 Sep 22 23:22:23.366 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/050.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45722 Sep 22 23:22:23.366 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/050"
45723 Sep 22 23:22:23.366 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/050.db"
45724 Sep 22 23:22:23.366 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45725 Sep 22 23:22:23.366 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/050.replace" to "/tmp/downstairs-zrMnlo6G/00/000/050.completed"
45726 Sep 22 23:22:23.366 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45727 Sep 22 23:22:23.366 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45728 Sep 22 23:22:23.366 DEBG [0] It's time to notify for 470
45729 Sep 22 23:22:23.366 INFO Completion from [0] id:470 status:true
45730 Sep 22 23:22:23.366 INFO [471/752] Repair commands completed
45731 Sep 22 23:22:23.367 INFO Pop front: ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }, state: ClientData([New, New, New]) }
45732 Sep 22 23:22:23.367 INFO Sent repair work, now wait for resp
45733 Sep 22 23:22:23.367 INFO [0] received reconcile message
45734 Sep 22 23:22:23.367 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }, state: ClientData([InProgress, New, New]) }, : downstairs
45735 Sep 22 23:22:23.367 INFO [0] client ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }
45736 Sep 22 23:22:23.367 INFO [1] received reconcile message
45737 Sep 22 23:22:23.367 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45738 Sep 22 23:22:23.367 INFO [1] client ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }
45739 Sep 22 23:22:23.367 INFO [2] received reconcile message
45740 Sep 22 23:22:23.367 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45741 Sep 22 23:22:23.367 INFO [2] client ExtentReopen { repair_id: ReconciliationId(471), extent_id: 80 }
45742 Sep 22 23:22:23.367 DEBG 471 Reopen extent 80
45743 Sep 22 23:22:23.368 DEBG 471 Reopen extent 80
45744 Sep 22 23:22:23.368 DEBG 471 Reopen extent 80
45745 Sep 22 23:22:23.369 DEBG [2] It's time to notify for 471
45746 Sep 22 23:22:23.369 INFO Completion from [2] id:471 status:true
45747 Sep 22 23:22:23.369 INFO [472/752] Repair commands completed
45748 Sep 22 23:22:23.369 INFO Pop front: ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45749 Sep 22 23:22:23.369 INFO Sent repair work, now wait for resp
45750 Sep 22 23:22:23.369 INFO [0] received reconcile message
45751 Sep 22 23:22:23.369 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45752 Sep 22 23:22:23.369 INFO [0] client ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45753 Sep 22 23:22:23.369 INFO [1] received reconcile message
45754 Sep 22 23:22:23.369 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45755 Sep 22 23:22:23.369 INFO [1] client ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45756 Sep 22 23:22:23.369 INFO [2] received reconcile message
45757 Sep 22 23:22:23.369 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45758 Sep 22 23:22:23.369 INFO [2] client ExtentFlush { repair_id: ReconciliationId(472), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45759 Sep 22 23:22:23.369 DEBG 472 Flush extent 149 with f:2 g:2
45760 Sep 22 23:22:23.369 DEBG Flush just extent 149 with f:2 and g:2
45761 Sep 22 23:22:23.369 DEBG [1] It's time to notify for 472
45762 Sep 22 23:22:23.369 INFO Completion from [1] id:472 status:true
45763 Sep 22 23:22:23.369 INFO [473/752] Repair commands completed
45764 Sep 22 23:22:23.369 INFO Pop front: ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }, state: ClientData([New, New, New]) }
45765 Sep 22 23:22:23.369 INFO Sent repair work, now wait for resp
45766 Sep 22 23:22:23.369 INFO [0] received reconcile message
45767 Sep 22 23:22:23.370 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }, state: ClientData([InProgress, New, New]) }, : downstairs
45768 Sep 22 23:22:23.370 INFO [0] client ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }
45769 Sep 22 23:22:23.370 INFO [1] received reconcile message
45770 Sep 22 23:22:23.370 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45771 Sep 22 23:22:23.370 INFO [1] client ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }
45772 Sep 22 23:22:23.370 INFO [2] received reconcile message
45773 Sep 22 23:22:23.370 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45774 Sep 22 23:22:23.370 INFO [2] client ExtentClose { repair_id: ReconciliationId(473), extent_id: 149 }
45775 Sep 22 23:22:23.370 DEBG 473 Close extent 149
45776 Sep 22 23:22:23.370 DEBG 473 Close extent 149
45777 Sep 22 23:22:23.370 DEBG 473 Close extent 149
45778 Sep 22 23:22:23.371 DEBG [2] It's time to notify for 473
45779 Sep 22 23:22:23.371 INFO Completion from [2] id:473 status:true
45780 Sep 22 23:22:23.371 INFO [474/752] Repair commands completed
45781 Sep 22 23:22:23.371 INFO Pop front: ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45782 Sep 22 23:22:23.371 INFO Sent repair work, now wait for resp
45783 Sep 22 23:22:23.371 INFO [0] received reconcile message
45784 Sep 22 23:22:23.371 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45785 Sep 22 23:22:23.371 INFO [0] client ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45786 Sep 22 23:22:23.371 INFO [0] Sending repair request ReconciliationId(474)
45787 Sep 22 23:22:23.371 INFO [1] received reconcile message
45788 Sep 22 23:22:23.371 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45789 Sep 22 23:22:23.371 INFO [1] client ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45790 Sep 22 23:22:23.371 INFO [1] No action required ReconciliationId(474)
45791 Sep 22 23:22:23.371 INFO [2] received reconcile message
45792 Sep 22 23:22:23.371 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45793 Sep 22 23:22:23.371 INFO [2] client ExtentRepair { repair_id: ReconciliationId(474), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45794 Sep 22 23:22:23.371 INFO [2] No action required ReconciliationId(474)
45795 Sep 22 23:22:23.371 DEBG 474 Repair extent 149 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45796 Sep 22 23:22:23.371 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/095.copy"
45797 Sep 22 23:22:23.436 INFO accepted connection, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45798 Sep 22 23:22:23.437 TRCE incoming request, uri: /extent/149/files, method: GET, req_id: da31bb3f-0a5d-4149-96fa-a915d6851d35, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45799 Sep 22 23:22:23.437 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/149/files, method: GET, req_id: da31bb3f-0a5d-4149-96fa-a915d6851d35, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45800 Sep 22 23:22:23.437 INFO eid:149 Found repair files: ["095", "095.db"]
45801 Sep 22 23:22:23.437 TRCE incoming request, uri: /newextent/149/data, method: GET, req_id: 1e084aaa-5e86-47f7-9fb6-cd9c3a5756aa, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45802 Sep 22 23:22:23.438 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/149/data, method: GET, req_id: 1e084aaa-5e86-47f7-9fb6-cd9c3a5756aa, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45803 Sep 22 23:22:23.442 ERRO [1] job id 1086 saw error GenericError("test error")
45804 Sep 22 23:22:23.442 DEBG IO Flush 1088 has deps [JobId(1087), JobId(1086)]
45805 Sep 22 23:22:23.442 DEBG Flush :1086 extent_limit None deps:[JobId(1085), JobId(1084)] res:true f:32 g:1
45806 Sep 22 23:22:23.442 TRCE incoming request, uri: /newextent/149/db, method: GET, req_id: 66eb705b-2e9b-4255-b889-671dd4a5c36b, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45807 Sep 22 23:22:23.443 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/149/db, method: GET, req_id: 66eb705b-2e9b-4255-b889-671dd4a5c36b, remote_addr: 127.0.0.1:59607, local_addr: 127.0.0.1:52864, task: repair
45808 Sep 22 23:22:23.444 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/095.copy" to "/tmp/downstairs-zrMnlo6G/00/000/095.replace"
45809 Sep 22 23:22:23.444 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45810 Sep 22 23:22:23.445 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/095.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45811 Sep 22 23:22:23.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/095"
45812 Sep 22 23:22:23.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/095.db"
45813 Sep 22 23:22:23.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45814 Sep 22 23:22:23.445 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/095.replace" to "/tmp/downstairs-zrMnlo6G/00/000/095.completed"
45815 Sep 22 23:22:23.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45816 Sep 22 23:22:23.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45817 Sep 22 23:22:23.445 DEBG [0] It's time to notify for 474
45818 Sep 22 23:22:23.445 INFO Completion from [0] id:474 status:true
45819 Sep 22 23:22:23.445 INFO [475/752] Repair commands completed
45820 Sep 22 23:22:23.445 INFO Pop front: ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }, state: ClientData([New, New, New]) }
45821 Sep 22 23:22:23.445 INFO Sent repair work, now wait for resp
45822 Sep 22 23:22:23.445 INFO [0] received reconcile message
45823 Sep 22 23:22:23.445 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }, state: ClientData([InProgress, New, New]) }, : downstairs
45824 Sep 22 23:22:23.445 INFO [0] client ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }
45825 Sep 22 23:22:23.446 INFO [1] received reconcile message
45826 Sep 22 23:22:23.446 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45827 Sep 22 23:22:23.446 INFO [1] client ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }
45828 Sep 22 23:22:23.446 INFO [2] received reconcile message
45829 Sep 22 23:22:23.446 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45830 Sep 22 23:22:23.446 INFO [2] client ExtentReopen { repair_id: ReconciliationId(475), extent_id: 149 }
45831 Sep 22 23:22:23.446 DEBG 475 Reopen extent 149
45832 Sep 22 23:22:23.446 DEBG 475 Reopen extent 149
45833 Sep 22 23:22:23.447 DEBG 475 Reopen extent 149
45834 Sep 22 23:22:23.447 DEBG [2] It's time to notify for 475
45835 Sep 22 23:22:23.448 INFO Completion from [2] id:475 status:true
45836 Sep 22 23:22:23.448 INFO [476/752] Repair commands completed
45837 Sep 22 23:22:23.448 INFO Pop front: ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45838 Sep 22 23:22:23.448 INFO Sent repair work, now wait for resp
45839 Sep 22 23:22:23.448 INFO [0] received reconcile message
45840 Sep 22 23:22:23.448 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45841 Sep 22 23:22:23.448 INFO [0] client ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45842 Sep 22 23:22:23.448 INFO [1] received reconcile message
45843 Sep 22 23:22:23.448 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45844 Sep 22 23:22:23.448 DEBG Read :1087 deps:[JobId(1086)] res:true
45845 Sep 22 23:22:23.448 INFO [1] client ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45846 Sep 22 23:22:23.448 INFO [2] received reconcile message
45847 Sep 22 23:22:23.448 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45848 Sep 22 23:22:23.448 INFO [2] client ExtentFlush { repair_id: ReconciliationId(476), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45849 Sep 22 23:22:23.448 DEBG 476 Flush extent 161 with f:2 g:2
45850 Sep 22 23:22:23.448 DEBG Flush just extent 161 with f:2 and g:2
45851 Sep 22 23:22:23.448 DEBG [1] It's time to notify for 476
45852 Sep 22 23:22:23.448 INFO Completion from [1] id:476 status:true
45853 Sep 22 23:22:23.448 INFO [477/752] Repair commands completed
45854 Sep 22 23:22:23.448 INFO Pop front: ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }, state: ClientData([New, New, New]) }
45855 Sep 22 23:22:23.448 INFO Sent repair work, now wait for resp
45856 Sep 22 23:22:23.448 INFO [0] received reconcile message
45857 Sep 22 23:22:23.448 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }, state: ClientData([InProgress, New, New]) }, : downstairs
45858 Sep 22 23:22:23.448 INFO [0] client ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }
45859 Sep 22 23:22:23.448 INFO [1] received reconcile message
45860 Sep 22 23:22:23.448 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45861 Sep 22 23:22:23.448 INFO [1] client ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }
45862 Sep 22 23:22:23.448 INFO [2] received reconcile message
45863 Sep 22 23:22:23.448 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45864 Sep 22 23:22:23.448 INFO [2] client ExtentClose { repair_id: ReconciliationId(477), extent_id: 161 }
45865 Sep 22 23:22:23.449 DEBG 477 Close extent 161
45866 Sep 22 23:22:23.449 DEBG 477 Close extent 161
45867 Sep 22 23:22:23.449 DEBG 477 Close extent 161
45868 Sep 22 23:22:23.449 DEBG [2] It's time to notify for 477
45869 Sep 22 23:22:23.450 INFO Completion from [2] id:477 status:true
45870 Sep 22 23:22:23.450 INFO [478/752] Repair commands completed
45871 Sep 22 23:22:23.450 INFO Pop front: ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45872 Sep 22 23:22:23.450 INFO Sent repair work, now wait for resp
45873 Sep 22 23:22:23.450 INFO [0] received reconcile message
45874 Sep 22 23:22:23.450 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45875 Sep 22 23:22:23.450 INFO [0] client ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45876 Sep 22 23:22:23.450 INFO [0] Sending repair request ReconciliationId(478)
45877 Sep 22 23:22:23.450 INFO [1] received reconcile message
45878 Sep 22 23:22:23.450 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45879 Sep 22 23:22:23.450 INFO [1] client ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45880 Sep 22 23:22:23.450 INFO [1] No action required ReconciliationId(478)
45881 Sep 22 23:22:23.450 INFO [2] received reconcile message
45882 Sep 22 23:22:23.450 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45883 Sep 22 23:22:23.450 INFO [2] client ExtentRepair { repair_id: ReconciliationId(478), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45884 Sep 22 23:22:23.450 INFO [2] No action required ReconciliationId(478)
45885 Sep 22 23:22:23.450 DEBG 478 Repair extent 161 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45886 Sep 22 23:22:23.450 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A1.copy"
45887 Sep 22 23:22:23.470 DEBG up_ds_listen was notified
45888 Sep 22 23:22:23.470 DEBG up_ds_listen process 1086
45889 Sep 22 23:22:23.470 DEBG [A] ack job 1086:87, : downstairs
45890 Sep 22 23:22:23.470 DEBG up_ds_listen checked 1 jobs, back to waiting
45891 Sep 22 23:22:23.472 WARN returning error on flush!
45892 Sep 22 23:22:23.472 DEBG Flush :1088 extent_limit None deps:[JobId(1087), JobId(1086)] res:false f:33 g:1
45893 Sep 22 23:22:23.472 INFO [lossy] skipping 1088
45894 Sep 22 23:22:23.472 INFO [lossy] skipping 1088
45895 Sep 22 23:22:23.472 DEBG Flush :1088 extent_limit None deps:[JobId(1087), JobId(1086)] res:true f:33 g:1
45896 Sep 22 23:22:23.472 INFO [lossy] sleeping 1 second
45897 Sep 22 23:22:23.515 INFO accepted connection, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45898 Sep 22 23:22:23.515 TRCE incoming request, uri: /extent/161/files, method: GET, req_id: 5fbaf268-0432-48df-bfeb-03e97c4425a2, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45899 Sep 22 23:22:23.515 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/161/files, method: GET, req_id: 5fbaf268-0432-48df-bfeb-03e97c4425a2, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45900 Sep 22 23:22:23.516 INFO eid:161 Found repair files: ["0A1", "0A1.db"]
45901 Sep 22 23:22:23.516 TRCE incoming request, uri: /newextent/161/data, method: GET, req_id: 20aee4b2-a557-4301-8c8c-adbedcea927b, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45902 Sep 22 23:22:23.516 INFO request completed, latency_us: 330, response_code: 200, uri: /newextent/161/data, method: GET, req_id: 20aee4b2-a557-4301-8c8c-adbedcea927b, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45903 Sep 22 23:22:23.521 TRCE incoming request, uri: /newextent/161/db, method: GET, req_id: 835d9d23-105d-494a-b302-a967e4efb1f1, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45904 Sep 22 23:22:23.521 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/161/db, method: GET, req_id: 835d9d23-105d-494a-b302-a967e4efb1f1, remote_addr: 127.0.0.1:47981, local_addr: 127.0.0.1:52864, task: repair
45905 Sep 22 23:22:23.523 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A1.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A1.replace"
45906 Sep 22 23:22:23.523 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45907 Sep 22 23:22:23.523 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A1.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45908 Sep 22 23:22:23.524 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A1"
45909 Sep 22 23:22:23.524 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A1.db"
45910 Sep 22 23:22:23.524 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45911 Sep 22 23:22:23.524 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A1.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A1.completed"
45912 Sep 22 23:22:23.524 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45913 Sep 22 23:22:23.524 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45914 Sep 22 23:22:23.524 DEBG [0] It's time to notify for 478
45915 Sep 22 23:22:23.524 INFO Completion from [0] id:478 status:true
45916 Sep 22 23:22:23.524 INFO [479/752] Repair commands completed
45917 Sep 22 23:22:23.524 INFO Pop front: ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }, state: ClientData([New, New, New]) }
45918 Sep 22 23:22:23.524 INFO Sent repair work, now wait for resp
45919 Sep 22 23:22:23.524 INFO [0] received reconcile message
45920 Sep 22 23:22:23.524 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }, state: ClientData([InProgress, New, New]) }, : downstairs
45921 Sep 22 23:22:23.524 INFO [0] client ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }
45922 Sep 22 23:22:23.524 INFO [1] received reconcile message
45923 Sep 22 23:22:23.524 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45924 Sep 22 23:22:23.524 INFO [1] client ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }
45925 Sep 22 23:22:23.525 INFO [2] received reconcile message
45926 Sep 22 23:22:23.525 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45927 Sep 22 23:22:23.525 INFO [2] client ExtentReopen { repair_id: ReconciliationId(479), extent_id: 161 }
45928 Sep 22 23:22:23.525 DEBG 479 Reopen extent 161
45929 Sep 22 23:22:23.525 DEBG 479 Reopen extent 161
45930 Sep 22 23:22:23.526 DEBG 479 Reopen extent 161
45931 Sep 22 23:22:23.526 DEBG [2] It's time to notify for 479
45932 Sep 22 23:22:23.527 INFO Completion from [2] id:479 status:true
45933 Sep 22 23:22:23.527 INFO [480/752] Repair commands completed
45934 Sep 22 23:22:23.527 INFO Pop front: ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45935 Sep 22 23:22:23.527 INFO Sent repair work, now wait for resp
45936 Sep 22 23:22:23.527 INFO [0] received reconcile message
45937 Sep 22 23:22:23.527 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45938 Sep 22 23:22:23.527 INFO [0] client ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45939 Sep 22 23:22:23.527 INFO [1] received reconcile message
45940 Sep 22 23:22:23.527 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45941 Sep 22 23:22:23.527 INFO [1] client ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45942 Sep 22 23:22:23.527 INFO [2] received reconcile message
45943 Sep 22 23:22:23.527 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45944 Sep 22 23:22:23.527 INFO [2] client ExtentFlush { repair_id: ReconciliationId(480), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45945 Sep 22 23:22:23.527 DEBG 480 Flush extent 71 with f:2 g:2
45946 Sep 22 23:22:23.527 DEBG Flush just extent 71 with f:2 and g:2
45947 Sep 22 23:22:23.527 DEBG [1] It's time to notify for 480
45948 Sep 22 23:22:23.527 INFO Completion from [1] id:480 status:true
45949 Sep 22 23:22:23.527 INFO [481/752] Repair commands completed
45950 Sep 22 23:22:23.527 INFO Pop front: ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }, state: ClientData([New, New, New]) }
45951 Sep 22 23:22:23.527 INFO Sent repair work, now wait for resp
45952 Sep 22 23:22:23.527 INFO [0] received reconcile message
45953 Sep 22 23:22:23.527 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }, state: ClientData([InProgress, New, New]) }, : downstairs
45954 Sep 22 23:22:23.527 INFO [0] client ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }
45955 Sep 22 23:22:23.527 INFO [1] received reconcile message
45956 Sep 22 23:22:23.527 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45957 Sep 22 23:22:23.527 INFO [1] client ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }
45958 Sep 22 23:22:23.527 INFO [2] received reconcile message
45959 Sep 22 23:22:23.527 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45960 Sep 22 23:22:23.527 INFO [2] client ExtentClose { repair_id: ReconciliationId(481), extent_id: 71 }
45961 Sep 22 23:22:23.528 DEBG 481 Close extent 71
45962 Sep 22 23:22:23.528 DEBG 481 Close extent 71
45963 Sep 22 23:22:23.528 DEBG 481 Close extent 71
45964 Sep 22 23:22:23.529 DEBG [2] It's time to notify for 481
45965 Sep 22 23:22:23.529 INFO Completion from [2] id:481 status:true
45966 Sep 22 23:22:23.529 INFO [482/752] Repair commands completed
45967 Sep 22 23:22:23.529 INFO Pop front: ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45968 Sep 22 23:22:23.529 INFO Sent repair work, now wait for resp
45969 Sep 22 23:22:23.529 INFO [0] received reconcile message
45970 Sep 22 23:22:23.529 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45971 Sep 22 23:22:23.529 INFO [0] client ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45972 Sep 22 23:22:23.529 INFO [0] Sending repair request ReconciliationId(482)
45973 Sep 22 23:22:23.529 INFO [1] received reconcile message
45974 Sep 22 23:22:23.529 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45975 Sep 22 23:22:23.529 INFO [1] client ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45976 Sep 22 23:22:23.529 INFO [1] No action required ReconciliationId(482)
45977 Sep 22 23:22:23.529 INFO [2] received reconcile message
45978 Sep 22 23:22:23.529 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45979 Sep 22 23:22:23.529 INFO [2] client ExtentRepair { repair_id: ReconciliationId(482), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
45980 Sep 22 23:22:23.529 INFO [2] No action required ReconciliationId(482)
45981 Sep 22 23:22:23.529 DEBG 482 Repair extent 71 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
45982 Sep 22 23:22:23.529 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/047.copy"
45983 Sep 22 23:22:23.594 INFO accepted connection, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45984 Sep 22 23:22:23.595 TRCE incoming request, uri: /extent/71/files, method: GET, req_id: e1fd86a5-91eb-42eb-9ada-e4eb94f37b9e, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45985 Sep 22 23:22:23.595 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/71/files, method: GET, req_id: e1fd86a5-91eb-42eb-9ada-e4eb94f37b9e, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45986 Sep 22 23:22:23.595 INFO eid:71 Found repair files: ["047", "047.db"]
45987 Sep 22 23:22:23.595 TRCE incoming request, uri: /newextent/71/data, method: GET, req_id: a912a885-45fd-4956-be87-f454ec33c455, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45988 Sep 22 23:22:23.596 INFO request completed, latency_us: 305, response_code: 200, uri: /newextent/71/data, method: GET, req_id: a912a885-45fd-4956-be87-f454ec33c455, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45989 Sep 22 23:22:23.600 TRCE incoming request, uri: /newextent/71/db, method: GET, req_id: 8c7a52fe-72cc-4b2d-a00f-15b4c0e7a547, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45990 Sep 22 23:22:23.601 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/71/db, method: GET, req_id: 8c7a52fe-72cc-4b2d-a00f-15b4c0e7a547, remote_addr: 127.0.0.1:44221, local_addr: 127.0.0.1:52864, task: repair
45991 Sep 22 23:22:23.602 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/047.copy" to "/tmp/downstairs-zrMnlo6G/00/000/047.replace"
45992 Sep 22 23:22:23.602 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45993 Sep 22 23:22:23.603 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/047.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
45994 Sep 22 23:22:23.603 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/047"
45995 Sep 22 23:22:23.603 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/047.db"
45996 Sep 22 23:22:23.603 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45997 Sep 22 23:22:23.603 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/047.replace" to "/tmp/downstairs-zrMnlo6G/00/000/047.completed"
45998 Sep 22 23:22:23.603 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
45999 Sep 22 23:22:23.603 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46000 Sep 22 23:22:23.603 DEBG [0] It's time to notify for 482
46001 Sep 22 23:22:23.604 INFO Completion from [0] id:482 status:true
46002 Sep 22 23:22:23.604 INFO [483/752] Repair commands completed
46003 Sep 22 23:22:23.604 INFO Pop front: ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }, state: ClientData([New, New, New]) }
46004 Sep 22 23:22:23.604 INFO Sent repair work, now wait for resp
46005 Sep 22 23:22:23.604 INFO [0] received reconcile message
46006 Sep 22 23:22:23.604 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }, state: ClientData([InProgress, New, New]) }, : downstairs
46007 Sep 22 23:22:23.604 INFO [0] client ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }
46008 Sep 22 23:22:23.604 INFO [1] received reconcile message
46009 Sep 22 23:22:23.604 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46010 Sep 22 23:22:23.604 INFO [1] client ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }
46011 Sep 22 23:22:23.604 INFO [2] received reconcile message
46012 Sep 22 23:22:23.604 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46013 Sep 22 23:22:23.604 INFO [2] client ExtentReopen { repair_id: ReconciliationId(483), extent_id: 71 }
46014 Sep 22 23:22:23.604 DEBG 483 Reopen extent 71
46015 Sep 22 23:22:23.605 DEBG 483 Reopen extent 71
46016 Sep 22 23:22:23.605 DEBG 483 Reopen extent 71
46017 Sep 22 23:22:23.606 DEBG [2] It's time to notify for 483
46018 Sep 22 23:22:23.606 INFO Completion from [2] id:483 status:true
46019 Sep 22 23:22:23.606 INFO [484/752] Repair commands completed
46020 Sep 22 23:22:23.606 INFO Pop front: ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46021 Sep 22 23:22:23.606 INFO Sent repair work, now wait for resp
46022 Sep 22 23:22:23.606 INFO [0] received reconcile message
46023 Sep 22 23:22:23.606 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46024 Sep 22 23:22:23.606 INFO [0] client ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46025 Sep 22 23:22:23.606 INFO [1] received reconcile message
46026 Sep 22 23:22:23.606 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46027 Sep 22 23:22:23.606 INFO [1] client ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46028 Sep 22 23:22:23.606 INFO [2] received reconcile message
46029 Sep 22 23:22:23.606 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46030 Sep 22 23:22:23.606 INFO [2] client ExtentFlush { repair_id: ReconciliationId(484), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46031 Sep 22 23:22:23.606 DEBG 484 Flush extent 5 with f:2 g:2
46032 Sep 22 23:22:23.606 DEBG Flush just extent 5 with f:2 and g:2
46033 Sep 22 23:22:23.606 DEBG [1] It's time to notify for 484
46034 Sep 22 23:22:23.606 INFO Completion from [1] id:484 status:true
46035 Sep 22 23:22:23.606 INFO [485/752] Repair commands completed
46036 Sep 22 23:22:23.606 INFO Pop front: ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }, state: ClientData([New, New, New]) }
46037 Sep 22 23:22:23.606 INFO Sent repair work, now wait for resp
46038 Sep 22 23:22:23.606 INFO [0] received reconcile message
46039 Sep 22 23:22:23.606 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }, state: ClientData([InProgress, New, New]) }, : downstairs
46040 Sep 22 23:22:23.606 INFO [0] client ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }
46041 Sep 22 23:22:23.606 INFO [1] received reconcile message
46042 Sep 22 23:22:23.606 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46043 Sep 22 23:22:23.606 INFO [1] client ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }
46044 Sep 22 23:22:23.606 INFO [2] received reconcile message
46045 Sep 22 23:22:23.607 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46046 Sep 22 23:22:23.607 INFO [2] client ExtentClose { repair_id: ReconciliationId(485), extent_id: 5 }
46047 Sep 22 23:22:23.607 DEBG 485 Close extent 5
46048 Sep 22 23:22:23.607 DEBG 485 Close extent 5
46049 Sep 22 23:22:23.607 DEBG 485 Close extent 5
46050 Sep 22 23:22:23.608 DEBG [2] It's time to notify for 485
46051 Sep 22 23:22:23.608 INFO Completion from [2] id:485 status:true
46052 Sep 22 23:22:23.608 INFO [486/752] Repair commands completed
46053 Sep 22 23:22:23.608 INFO Pop front: ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46054 Sep 22 23:22:23.608 INFO Sent repair work, now wait for resp
46055 Sep 22 23:22:23.608 INFO [0] received reconcile message
46056 Sep 22 23:22:23.608 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46057 Sep 22 23:22:23.608 INFO [0] client ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46058 Sep 22 23:22:23.608 INFO [0] Sending repair request ReconciliationId(486)
46059 Sep 22 23:22:23.608 INFO [1] received reconcile message
46060 Sep 22 23:22:23.608 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46061 Sep 22 23:22:23.608 INFO [1] client ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46062 Sep 22 23:22:23.608 INFO [1] No action required ReconciliationId(486)
46063 Sep 22 23:22:23.608 INFO [2] received reconcile message
46064 Sep 22 23:22:23.608 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46065 Sep 22 23:22:23.608 INFO [2] client ExtentRepair { repair_id: ReconciliationId(486), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46066 Sep 22 23:22:23.608 INFO [2] No action required ReconciliationId(486)
46067 Sep 22 23:22:23.608 DEBG 486 Repair extent 5 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46068 Sep 22 23:22:23.608 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/005.copy"
46069 Sep 22 23:22:23.672 INFO accepted connection, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46070 Sep 22 23:22:23.672 TRCE incoming request, uri: /extent/5/files, method: GET, req_id: e05d4c00-66e0-4df3-b3b1-8d07125abd88, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46071 Sep 22 23:22:23.672 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/5/files, method: GET, req_id: e05d4c00-66e0-4df3-b3b1-8d07125abd88, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46072 Sep 22 23:22:23.673 INFO eid:5 Found repair files: ["005", "005.db"]
46073 Sep 22 23:22:23.673 TRCE incoming request, uri: /newextent/5/data, method: GET, req_id: 45657933-a49f-416e-802c-bd038a986667, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46074 Sep 22 23:22:23.673 INFO request completed, latency_us: 321, response_code: 200, uri: /newextent/5/data, method: GET, req_id: 45657933-a49f-416e-802c-bd038a986667, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46075 Sep 22 23:22:23.678 TRCE incoming request, uri: /newextent/5/db, method: GET, req_id: 64fa2149-d80d-482e-abc7-cc6e1050d373, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46076 Sep 22 23:22:23.679 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/5/db, method: GET, req_id: 64fa2149-d80d-482e-abc7-cc6e1050d373, remote_addr: 127.0.0.1:33534, local_addr: 127.0.0.1:52864, task: repair
46077 Sep 22 23:22:23.680 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/005.copy" to "/tmp/downstairs-zrMnlo6G/00/000/005.replace"
46078 Sep 22 23:22:23.680 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46079 Sep 22 23:22:23.680 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/005.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46080 Sep 22 23:22:23.681 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/005"
46081 Sep 22 23:22:23.681 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/005.db"
46082 Sep 22 23:22:23.681 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46083 Sep 22 23:22:23.681 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/005.replace" to "/tmp/downstairs-zrMnlo6G/00/000/005.completed"
46084 Sep 22 23:22:23.681 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46085 Sep 22 23:22:23.681 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46086 Sep 22 23:22:23.681 DEBG [0] It's time to notify for 486
46087 Sep 22 23:22:23.681 INFO Completion from [0] id:486 status:true
46088 Sep 22 23:22:23.681 INFO [487/752] Repair commands completed
46089 Sep 22 23:22:23.681 INFO Pop front: ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }, state: ClientData([New, New, New]) }
46090 Sep 22 23:22:23.681 INFO Sent repair work, now wait for resp
46091 Sep 22 23:22:23.681 INFO [0] received reconcile message
46092 Sep 22 23:22:23.681 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }, state: ClientData([InProgress, New, New]) }, : downstairs
46093 Sep 22 23:22:23.681 INFO [0] client ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }
46094 Sep 22 23:22:23.681 INFO [1] received reconcile message
46095 Sep 22 23:22:23.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46096 Sep 22 23:22:23.681 INFO [1] client ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }
46097 Sep 22 23:22:23.682 INFO [2] received reconcile message
46098 Sep 22 23:22:23.682 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46099 Sep 22 23:22:23.682 INFO [2] client ExtentReopen { repair_id: ReconciliationId(487), extent_id: 5 }
46100 Sep 22 23:22:23.682 DEBG 487 Reopen extent 5
46101 Sep 22 23:22:23.682 DEBG 487 Reopen extent 5
46102 Sep 22 23:22:23.683 DEBG 487 Reopen extent 5
46103 Sep 22 23:22:23.683 DEBG [2] It's time to notify for 487
46104 Sep 22 23:22:23.683 INFO Completion from [2] id:487 status:true
46105 Sep 22 23:22:23.683 INFO [488/752] Repair commands completed
46106 Sep 22 23:22:23.683 INFO Pop front: ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46107 Sep 22 23:22:23.684 INFO Sent repair work, now wait for resp
46108 Sep 22 23:22:23.684 INFO [0] received reconcile message
46109 Sep 22 23:22:23.684 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46110 Sep 22 23:22:23.684 INFO [0] client ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46111 Sep 22 23:22:23.684 INFO [1] received reconcile message
46112 Sep 22 23:22:23.684 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46113 Sep 22 23:22:23.684 INFO [1] client ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46114 Sep 22 23:22:23.684 INFO [2] received reconcile message
46115 Sep 22 23:22:23.684 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46116 Sep 22 23:22:23.684 INFO [2] client ExtentFlush { repair_id: ReconciliationId(488), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46117 Sep 22 23:22:23.684 DEBG 488 Flush extent 23 with f:2 g:2
46118 Sep 22 23:22:23.684 DEBG Flush just extent 23 with f:2 and g:2
46119 Sep 22 23:22:23.684 DEBG [1] It's time to notify for 488
46120 Sep 22 23:22:23.684 INFO Completion from [1] id:488 status:true
46121 Sep 22 23:22:23.684 INFO [489/752] Repair commands completed
46122 Sep 22 23:22:23.684 INFO Pop front: ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }, state: ClientData([New, New, New]) }
46123 Sep 22 23:22:23.684 INFO Sent repair work, now wait for resp
46124 Sep 22 23:22:23.684 INFO [0] received reconcile message
46125 Sep 22 23:22:23.684 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }, state: ClientData([InProgress, New, New]) }, : downstairs
46126 Sep 22 23:22:23.684 INFO [0] client ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }
46127 Sep 22 23:22:23.684 INFO [1] received reconcile message
46128 Sep 22 23:22:23.684 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46129 Sep 22 23:22:23.684 INFO [1] client ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }
46130 Sep 22 23:22:23.684 INFO [2] received reconcile message
46131 Sep 22 23:22:23.684 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46132 Sep 22 23:22:23.684 INFO [2] client ExtentClose { repair_id: ReconciliationId(489), extent_id: 23 }
46133 Sep 22 23:22:23.684 DEBG 489 Close extent 23
46134 Sep 22 23:22:23.685 DEBG 489 Close extent 23
46135 Sep 22 23:22:23.685 DEBG 489 Close extent 23
46136 Sep 22 23:22:23.685 DEBG [2] It's time to notify for 489
46137 Sep 22 23:22:23.685 INFO Completion from [2] id:489 status:true
46138 Sep 22 23:22:23.685 INFO [490/752] Repair commands completed
46139 Sep 22 23:22:23.685 INFO Pop front: ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46140 Sep 22 23:22:23.685 INFO Sent repair work, now wait for resp
46141 Sep 22 23:22:23.686 INFO [0] received reconcile message
46142 Sep 22 23:22:23.686 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46143 Sep 22 23:22:23.686 INFO [0] client ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46144 Sep 22 23:22:23.686 INFO [0] Sending repair request ReconciliationId(490)
46145 Sep 22 23:22:23.686 INFO [1] received reconcile message
46146 Sep 22 23:22:23.686 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46147 Sep 22 23:22:23.686 INFO [1] client ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46148 Sep 22 23:22:23.686 INFO [1] No action required ReconciliationId(490)
46149 Sep 22 23:22:23.686 INFO [2] received reconcile message
46150 Sep 22 23:22:23.686 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46151 Sep 22 23:22:23.686 INFO [2] client ExtentRepair { repair_id: ReconciliationId(490), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46152 Sep 22 23:22:23.686 INFO [2] No action required ReconciliationId(490)
46153 Sep 22 23:22:23.686 DEBG 490 Repair extent 23 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46154 Sep 22 23:22:23.686 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/017.copy"
46155 Sep 22 23:22:23.750 INFO accepted connection, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46156 Sep 22 23:22:23.750 TRCE incoming request, uri: /extent/23/files, method: GET, req_id: 95f04b47-842f-42e5-826c-c16c6d7218cf, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46157 Sep 22 23:22:23.751 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/23/files, method: GET, req_id: 95f04b47-842f-42e5-826c-c16c6d7218cf, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46158 Sep 22 23:22:23.751 INFO eid:23 Found repair files: ["017", "017.db"]
46159 Sep 22 23:22:23.751 TRCE incoming request, uri: /newextent/23/data, method: GET, req_id: 8f1ac1f2-06a0-4f5a-8548-ad5f0a952093, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46160 Sep 22 23:22:23.751 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/23/data, method: GET, req_id: 8f1ac1f2-06a0-4f5a-8548-ad5f0a952093, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46161 Sep 22 23:22:23.756 TRCE incoming request, uri: /newextent/23/db, method: GET, req_id: ddca5c4a-2afc-4921-a66b-3e0f64fa81f9, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46162 Sep 22 23:22:23.757 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/23/db, method: GET, req_id: ddca5c4a-2afc-4921-a66b-3e0f64fa81f9, remote_addr: 127.0.0.1:65203, local_addr: 127.0.0.1:52864, task: repair
46163 Sep 22 23:22:23.758 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/017.copy" to "/tmp/downstairs-zrMnlo6G/00/000/017.replace"
46164 Sep 22 23:22:23.758 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46165 Sep 22 23:22:23.758 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/017.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46166 Sep 22 23:22:23.759 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/017"
46167 Sep 22 23:22:23.759 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/017.db"
46168 Sep 22 23:22:23.759 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46169 Sep 22 23:22:23.759 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/017.replace" to "/tmp/downstairs-zrMnlo6G/00/000/017.completed"
46170 Sep 22 23:22:23.759 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46171 Sep 22 23:22:23.759 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46172 Sep 22 23:22:23.759 DEBG [0] It's time to notify for 490
46173 Sep 22 23:22:23.759 INFO Completion from [0] id:490 status:true
46174 Sep 22 23:22:23.759 INFO [491/752] Repair commands completed
46175 Sep 22 23:22:23.759 INFO Pop front: ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }, state: ClientData([New, New, New]) }
46176 Sep 22 23:22:23.759 INFO Sent repair work, now wait for resp
46177 Sep 22 23:22:23.759 INFO [0] received reconcile message
46178 Sep 22 23:22:23.759 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }, state: ClientData([InProgress, New, New]) }, : downstairs
46179 Sep 22 23:22:23.759 INFO [0] client ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }
46180 Sep 22 23:22:23.759 INFO [1] received reconcile message
46181 Sep 22 23:22:23.759 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46182 Sep 22 23:22:23.759 INFO [1] client ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }
46183 Sep 22 23:22:23.760 INFO [2] received reconcile message
46184 Sep 22 23:22:23.760 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46185 Sep 22 23:22:23.760 INFO [2] client ExtentReopen { repair_id: ReconciliationId(491), extent_id: 23 }
46186 Sep 22 23:22:23.760 DEBG 491 Reopen extent 23
46187 Sep 22 23:22:23.760 DEBG 491 Reopen extent 23
46188 Sep 22 23:22:23.761 DEBG 491 Reopen extent 23
46189 Sep 22 23:22:23.761 DEBG [2] It's time to notify for 491
46190 Sep 22 23:22:23.761 INFO Completion from [2] id:491 status:true
46191 Sep 22 23:22:23.761 INFO [492/752] Repair commands completed
46192 Sep 22 23:22:23.761 INFO Pop front: ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46193 Sep 22 23:22:23.761 INFO Sent repair work, now wait for resp
46194 Sep 22 23:22:23.761 INFO [0] received reconcile message
46195 Sep 22 23:22:23.761 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46196 Sep 22 23:22:23.762 INFO [0] client ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46197 Sep 22 23:22:23.762 INFO [1] received reconcile message
46198 Sep 22 23:22:23.762 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46199 Sep 22 23:22:23.762 INFO [1] client ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46200 Sep 22 23:22:23.762 INFO [2] received reconcile message
46201 Sep 22 23:22:23.762 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46202 Sep 22 23:22:23.762 INFO [2] client ExtentFlush { repair_id: ReconciliationId(492), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46203 Sep 22 23:22:23.762 DEBG 492 Flush extent 98 with f:2 g:2
46204 Sep 22 23:22:23.762 DEBG Flush just extent 98 with f:2 and g:2
46205 Sep 22 23:22:23.762 DEBG [1] It's time to notify for 492
46206 Sep 22 23:22:23.762 INFO Completion from [1] id:492 status:true
46207 Sep 22 23:22:23.762 INFO [493/752] Repair commands completed
46208 Sep 22 23:22:23.762 INFO Pop front: ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }, state: ClientData([New, New, New]) }
46209 Sep 22 23:22:23.762 INFO Sent repair work, now wait for resp
46210 Sep 22 23:22:23.762 INFO [0] received reconcile message
46211 Sep 22 23:22:23.762 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }, state: ClientData([InProgress, New, New]) }, : downstairs
46212 Sep 22 23:22:23.762 INFO [0] client ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }
46213 Sep 22 23:22:23.762 INFO [1] received reconcile message
46214 Sep 22 23:22:23.762 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46215 Sep 22 23:22:23.762 INFO [1] client ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }
46216 Sep 22 23:22:23.762 INFO [2] received reconcile message
46217 Sep 22 23:22:23.762 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46218 Sep 22 23:22:23.762 INFO [2] client ExtentClose { repair_id: ReconciliationId(493), extent_id: 98 }
46219 Sep 22 23:22:23.762 DEBG 493 Close extent 98
46220 Sep 22 23:22:23.763 DEBG 493 Close extent 98
46221 Sep 22 23:22:23.763 DEBG 493 Close extent 98
46222 Sep 22 23:22:23.763 DEBG [2] It's time to notify for 493
46223 Sep 22 23:22:23.763 INFO Completion from [2] id:493 status:true
46224 Sep 22 23:22:23.763 INFO [494/752] Repair commands completed
46225 Sep 22 23:22:23.763 INFO Pop front: ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46226 Sep 22 23:22:23.763 INFO Sent repair work, now wait for resp
46227 Sep 22 23:22:23.763 INFO [0] received reconcile message
46228 Sep 22 23:22:23.764 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46229 Sep 22 23:22:23.764 INFO [0] client ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46230 Sep 22 23:22:23.764 INFO [0] Sending repair request ReconciliationId(494)
46231 Sep 22 23:22:23.764 INFO [1] received reconcile message
46232 Sep 22 23:22:23.764 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46233 Sep 22 23:22:23.764 INFO [1] client ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46234 Sep 22 23:22:23.764 INFO [1] No action required ReconciliationId(494)
46235 Sep 22 23:22:23.764 INFO [2] received reconcile message
46236 Sep 22 23:22:23.764 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46237 Sep 22 23:22:23.764 INFO [2] client ExtentRepair { repair_id: ReconciliationId(494), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46238 Sep 22 23:22:23.764 INFO [2] No action required ReconciliationId(494)
46239 Sep 22 23:22:23.764 DEBG 494 Repair extent 98 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46240 Sep 22 23:22:23.764 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/062.copy"
46241 Sep 22 23:22:23.825 INFO accepted connection, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46242 Sep 22 23:22:23.826 TRCE incoming request, uri: /extent/98/files, method: GET, req_id: de2be4e1-9ffb-464f-b9e2-0eb78e7c5949, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46243 Sep 22 23:22:23.826 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/98/files, method: GET, req_id: de2be4e1-9ffb-464f-b9e2-0eb78e7c5949, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46244 Sep 22 23:22:23.826 INFO eid:98 Found repair files: ["062", "062.db"]
46245 Sep 22 23:22:23.826 TRCE incoming request, uri: /newextent/98/data, method: GET, req_id: 1c007ddd-e9f5-49f8-bba8-e5211363203f, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46246 Sep 22 23:22:23.827 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/98/data, method: GET, req_id: 1c007ddd-e9f5-49f8-bba8-e5211363203f, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46247 Sep 22 23:22:23.832 TRCE incoming request, uri: /newextent/98/db, method: GET, req_id: a5e79f21-ce0b-4065-9420-ac9b1ccd1aa9, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46248 Sep 22 23:22:23.832 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/98/db, method: GET, req_id: a5e79f21-ce0b-4065-9420-ac9b1ccd1aa9, remote_addr: 127.0.0.1:49104, local_addr: 127.0.0.1:52864, task: repair
46249 Sep 22 23:22:23.833 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/062.copy" to "/tmp/downstairs-zrMnlo6G/00/000/062.replace"
46250 Sep 22 23:22:23.833 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46251 Sep 22 23:22:23.834 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/062.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46252 Sep 22 23:22:23.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/062"
46253 Sep 22 23:22:23.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/062.db"
46254 Sep 22 23:22:23.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46255 Sep 22 23:22:23.834 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/062.replace" to "/tmp/downstairs-zrMnlo6G/00/000/062.completed"
46256 Sep 22 23:22:23.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46257 Sep 22 23:22:23.835 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46258 Sep 22 23:22:23.835 DEBG [0] It's time to notify for 494
46259 Sep 22 23:22:23.835 INFO Completion from [0] id:494 status:true
46260 Sep 22 23:22:23.835 INFO [495/752] Repair commands completed
46261 Sep 22 23:22:23.835 INFO Pop front: ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }, state: ClientData([New, New, New]) }
46262 Sep 22 23:22:23.835 INFO Sent repair work, now wait for resp
46263 Sep 22 23:22:23.835 INFO [0] received reconcile message
46264 Sep 22 23:22:23.835 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }, state: ClientData([InProgress, New, New]) }, : downstairs
46265 Sep 22 23:22:23.835 INFO [0] client ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }
46266 Sep 22 23:22:23.835 INFO [1] received reconcile message
46267 Sep 22 23:22:23.835 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46268 Sep 22 23:22:23.835 INFO [1] client ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }
46269 Sep 22 23:22:23.835 INFO [2] received reconcile message
46270 Sep 22 23:22:23.835 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46271 Sep 22 23:22:23.835 INFO [2] client ExtentReopen { repair_id: ReconciliationId(495), extent_id: 98 }
46272 Sep 22 23:22:23.835 DEBG 495 Reopen extent 98
46273 Sep 22 23:22:23.836 DEBG 495 Reopen extent 98
46274 Sep 22 23:22:23.836 DEBG 495 Reopen extent 98
46275 Sep 22 23:22:23.837 DEBG [2] It's time to notify for 495
46276 Sep 22 23:22:23.837 INFO Completion from [2] id:495 status:true
46277 Sep 22 23:22:23.837 INFO [496/752] Repair commands completed
46278 Sep 22 23:22:23.837 INFO Pop front: ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46279 Sep 22 23:22:23.837 INFO Sent repair work, now wait for resp
46280 Sep 22 23:22:23.837 INFO [0] received reconcile message
46281 Sep 22 23:22:23.837 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46282 Sep 22 23:22:23.837 INFO [0] client ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46283 Sep 22 23:22:23.837 INFO [1] received reconcile message
46284 Sep 22 23:22:23.837 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46285 Sep 22 23:22:23.837 INFO [1] client ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46286 Sep 22 23:22:23.837 INFO [2] received reconcile message
46287 Sep 22 23:22:23.837 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46288 Sep 22 23:22:23.837 INFO [2] client ExtentFlush { repair_id: ReconciliationId(496), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46289 Sep 22 23:22:23.837 DEBG 496 Flush extent 1 with f:2 g:2
46290 Sep 22 23:22:23.837 DEBG Flush just extent 1 with f:2 and g:2
46291 Sep 22 23:22:23.838 DEBG [1] It's time to notify for 496
46292 Sep 22 23:22:23.838 INFO Completion from [1] id:496 status:true
46293 Sep 22 23:22:23.838 INFO [497/752] Repair commands completed
46294 Sep 22 23:22:23.838 INFO Pop front: ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }, state: ClientData([New, New, New]) }
46295 Sep 22 23:22:23.838 INFO Sent repair work, now wait for resp
46296 Sep 22 23:22:23.838 INFO [0] received reconcile message
46297 Sep 22 23:22:23.838 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }, state: ClientData([InProgress, New, New]) }, : downstairs
46298 Sep 22 23:22:23.838 INFO [0] client ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }
46299 Sep 22 23:22:23.838 INFO [1] received reconcile message
46300 Sep 22 23:22:23.838 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46301 Sep 22 23:22:23.838 INFO [1] client ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }
46302 Sep 22 23:22:23.838 INFO [2] received reconcile message
46303 Sep 22 23:22:23.838 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46304 Sep 22 23:22:23.838 INFO [2] client ExtentClose { repair_id: ReconciliationId(497), extent_id: 1 }
46305 Sep 22 23:22:23.838 DEBG 497 Close extent 1
46306 Sep 22 23:22:23.838 DEBG 497 Close extent 1
46307 Sep 22 23:22:23.839 DEBG 497 Close extent 1
46308 Sep 22 23:22:23.839 DEBG [2] It's time to notify for 497
46309 Sep 22 23:22:23.839 INFO Completion from [2] id:497 status:true
46310 Sep 22 23:22:23.839 INFO [498/752] Repair commands completed
46311 Sep 22 23:22:23.839 INFO Pop front: ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46312 Sep 22 23:22:23.839 INFO Sent repair work, now wait for resp
46313 Sep 22 23:22:23.839 INFO [0] received reconcile message
46314 Sep 22 23:22:23.839 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46315 Sep 22 23:22:23.839 INFO [0] client ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46316 Sep 22 23:22:23.839 INFO [0] Sending repair request ReconciliationId(498)
46317 Sep 22 23:22:23.839 INFO [1] received reconcile message
46318 Sep 22 23:22:23.839 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46319 Sep 22 23:22:23.839 INFO [1] client ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46320 Sep 22 23:22:23.839 INFO [1] No action required ReconciliationId(498)
46321 Sep 22 23:22:23.839 INFO [2] received reconcile message
46322 Sep 22 23:22:23.839 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46323 Sep 22 23:22:23.839 INFO [2] client ExtentRepair { repair_id: ReconciliationId(498), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46324 Sep 22 23:22:23.839 INFO [2] No action required ReconciliationId(498)
46325 Sep 22 23:22:23.839 DEBG 498 Repair extent 1 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46326 Sep 22 23:22:23.840 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/001.copy"
46327 Sep 22 23:22:23.849 DEBG [1] Read AckReady 1087, : downstairs
46328 Sep 22 23:22:23.850 ERRO [1] job id 1088 saw error GenericError("test error")
46329 Sep 22 23:22:23.851 DEBG up_ds_listen was notified
46330 Sep 22 23:22:23.851 DEBG up_ds_listen process 1087
46331 Sep 22 23:22:23.851 DEBG [A] ack job 1087:88, : downstairs
46332 Sep 22 23:22:23.904 DEBG up_ds_listen checked 1 jobs, back to waiting
46333 Sep 22 23:22:23.904 INFO accepted connection, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46334 Sep 22 23:22:23.904 TRCE incoming request, uri: /extent/1/files, method: GET, req_id: 371c8c00-324e-4a4e-8d74-c3bf7b0c1bfd, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46335 Sep 22 23:22:23.904 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/1/files, method: GET, req_id: 371c8c00-324e-4a4e-8d74-c3bf7b0c1bfd, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46336 Sep 22 23:22:23.904 INFO eid:1 Found repair files: ["001", "001.db"]
46337 Sep 22 23:22:23.905 TRCE incoming request, uri: /newextent/1/data, method: GET, req_id: a2cb3bd5-9998-4a8c-b07f-6f4f912d4866, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46338 Sep 22 23:22:23.905 INFO request completed, latency_us: 261, response_code: 200, uri: /newextent/1/data, method: GET, req_id: a2cb3bd5-9998-4a8c-b07f-6f4f912d4866, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46339 Sep 22 23:22:23.906 DEBG Flush :1088 extent_limit None deps:[JobId(1087), JobId(1086)] res:true f:33 g:1
46340 Sep 22 23:22:23.906 INFO [lossy] sleeping 1 second
46341 Sep 22 23:22:23.906 WARN returning error on read!
46342 Sep 22 23:22:23.906 DEBG Read :1085 deps:[JobId(1084)] res:false
46343 Sep 22 23:22:23.906 INFO [lossy] skipping 1087
46344 Sep 22 23:22:23.906 WARN returning error on read!
46345 Sep 22 23:22:23.906 DEBG Read :1085 deps:[JobId(1084)] res:false
46346 Sep 22 23:22:23.910 TRCE incoming request, uri: /newextent/1/db, method: GET, req_id: 64337320-c947-435f-84bd-b0d32d25e5f9, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46347 Sep 22 23:22:23.911 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/1/db, method: GET, req_id: 64337320-c947-435f-84bd-b0d32d25e5f9, remote_addr: 127.0.0.1:58071, local_addr: 127.0.0.1:52864, task: repair
46348 Sep 22 23:22:23.912 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/001.copy" to "/tmp/downstairs-zrMnlo6G/00/000/001.replace"
46349 Sep 22 23:22:23.912 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46350 Sep 22 23:22:23.912 DEBG Read :1085 deps:[JobId(1084)] res:true
46351 Sep 22 23:22:23.913 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/001.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46352 Sep 22 23:22:23.913 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/001"
46353 Sep 22 23:22:23.913 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/001.db"
46354 Sep 22 23:22:23.913 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46355 Sep 22 23:22:23.913 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/001.replace" to "/tmp/downstairs-zrMnlo6G/00/000/001.completed"
46356 Sep 22 23:22:23.913 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46357 Sep 22 23:22:23.913 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46358 Sep 22 23:22:23.913 DEBG [0] It's time to notify for 498
46359 Sep 22 23:22:23.913 INFO Completion from [0] id:498 status:true
46360 Sep 22 23:22:23.913 INFO [499/752] Repair commands completed
46361 Sep 22 23:22:23.913 INFO Pop front: ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }, state: ClientData([New, New, New]) }
46362 Sep 22 23:22:23.913 INFO Sent repair work, now wait for resp
46363 Sep 22 23:22:23.914 INFO [0] received reconcile message
46364 Sep 22 23:22:23.914 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }, state: ClientData([InProgress, New, New]) }, : downstairs
46365 Sep 22 23:22:23.914 INFO [0] client ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }
46366 Sep 22 23:22:23.914 INFO [1] received reconcile message
46367 Sep 22 23:22:23.914 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46368 Sep 22 23:22:23.914 INFO [1] client ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }
46369 Sep 22 23:22:23.914 INFO [2] received reconcile message
46370 Sep 22 23:22:23.914 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46371 Sep 22 23:22:23.914 INFO [2] client ExtentReopen { repair_id: ReconciliationId(499), extent_id: 1 }
46372 Sep 22 23:22:23.914 DEBG 499 Reopen extent 1
46373 Sep 22 23:22:23.914 DEBG 499 Reopen extent 1
46374 Sep 22 23:22:23.915 DEBG 499 Reopen extent 1
46375 Sep 22 23:22:23.916 DEBG [2] It's time to notify for 499
46376 Sep 22 23:22:23.916 INFO Completion from [2] id:499 status:true
46377 Sep 22 23:22:23.916 INFO [500/752] Repair commands completed
46378 Sep 22 23:22:23.916 INFO Pop front: ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46379 Sep 22 23:22:23.916 INFO Sent repair work, now wait for resp
46380 Sep 22 23:22:23.916 INFO [0] received reconcile message
46381 Sep 22 23:22:23.916 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46382 Sep 22 23:22:23.916 INFO [0] client ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46383 Sep 22 23:22:23.916 INFO [1] received reconcile message
46384 Sep 22 23:22:23.916 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46385 Sep 22 23:22:23.916 INFO [1] client ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46386 Sep 22 23:22:23.916 INFO [2] received reconcile message
46387 Sep 22 23:22:23.916 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46388 Sep 22 23:22:23.916 INFO [2] client ExtentFlush { repair_id: ReconciliationId(500), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46389 Sep 22 23:22:23.916 DEBG 500 Flush extent 166 with f:2 g:2
46390 Sep 22 23:22:23.916 DEBG Flush just extent 166 with f:2 and g:2
46391 Sep 22 23:22:23.916 DEBG [1] It's time to notify for 500
46392 Sep 22 23:22:23.916 INFO Completion from [1] id:500 status:true
46393 Sep 22 23:22:23.916 INFO [501/752] Repair commands completed
46394 Sep 22 23:22:23.916 INFO Pop front: ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }, state: ClientData([New, New, New]) }
46395 Sep 22 23:22:23.916 INFO Sent repair work, now wait for resp
46396 Sep 22 23:22:23.916 INFO [0] received reconcile message
46397 Sep 22 23:22:23.916 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }, state: ClientData([InProgress, New, New]) }, : downstairs
46398 Sep 22 23:22:23.916 INFO [0] client ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }
46399 Sep 22 23:22:23.916 INFO [1] received reconcile message
46400 Sep 22 23:22:23.916 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46401 Sep 22 23:22:23.916 INFO [1] client ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }
46402 Sep 22 23:22:23.916 INFO [2] received reconcile message
46403 Sep 22 23:22:23.917 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46404 Sep 22 23:22:23.917 INFO [2] client ExtentClose { repair_id: ReconciliationId(501), extent_id: 166 }
46405 Sep 22 23:22:23.917 DEBG 501 Close extent 166
46406 Sep 22 23:22:23.917 DEBG 501 Close extent 166
46407 Sep 22 23:22:23.917 DEBG 501 Close extent 166
46408 Sep 22 23:22:23.918 DEBG [2] It's time to notify for 501
46409 Sep 22 23:22:23.918 INFO Completion from [2] id:501 status:true
46410 Sep 22 23:22:23.918 INFO [502/752] Repair commands completed
46411 Sep 22 23:22:23.918 INFO Pop front: ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46412 Sep 22 23:22:23.918 INFO Sent repair work, now wait for resp
46413 Sep 22 23:22:23.918 INFO [0] received reconcile message
46414 Sep 22 23:22:23.918 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46415 Sep 22 23:22:23.918 INFO [0] client ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46416 Sep 22 23:22:23.918 INFO [0] Sending repair request ReconciliationId(502)
46417 Sep 22 23:22:23.918 INFO [1] received reconcile message
46418 Sep 22 23:22:23.918 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46419 Sep 22 23:22:23.918 INFO [1] client ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46420 Sep 22 23:22:23.918 INFO [1] No action required ReconciliationId(502)
46421 Sep 22 23:22:23.918 INFO [2] received reconcile message
46422 Sep 22 23:22:23.918 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46423 Sep 22 23:22:23.918 INFO [2] client ExtentRepair { repair_id: ReconciliationId(502), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46424 Sep 22 23:22:23.918 INFO [2] No action required ReconciliationId(502)
46425 Sep 22 23:22:23.918 DEBG 502 Repair extent 166 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46426 Sep 22 23:22:23.918 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A6.copy"
46427 Sep 22 23:22:23.981 INFO accepted connection, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46428 Sep 22 23:22:23.982 TRCE incoming request, uri: /extent/166/files, method: GET, req_id: e1e93694-168c-42b9-89a1-097cf9321150, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46429 Sep 22 23:22:23.982 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/166/files, method: GET, req_id: e1e93694-168c-42b9-89a1-097cf9321150, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46430 Sep 22 23:22:23.982 INFO eid:166 Found repair files: ["0A6", "0A6.db"]
46431 Sep 22 23:22:23.982 DEBG IO Read 1089 has deps [JobId(1088)]
46432 Sep 22 23:22:23.982 TRCE incoming request, uri: /newextent/166/data, method: GET, req_id: 0f3b0f27-38b7-4baf-8f1c-7120939c0811, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46433 Sep 22 23:22:23.983 INFO request completed, latency_us: 308, response_code: 200, uri: /newextent/166/data, method: GET, req_id: 0f3b0f27-38b7-4baf-8f1c-7120939c0811, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46434 Sep 22 23:22:23.987 TRCE incoming request, uri: /newextent/166/db, method: GET, req_id: 10f416ee-7917-4c50-8eea-76ec84ec4913, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46435 Sep 22 23:22:23.987 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/166/db, method: GET, req_id: 10f416ee-7917-4c50-8eea-76ec84ec4913, remote_addr: 127.0.0.1:54799, local_addr: 127.0.0.1:52864, task: repair
46436 Sep 22 23:22:23.989 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A6.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A6.replace"
46437 Sep 22 23:22:23.989 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46438 Sep 22 23:22:23.989 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A6.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46439 Sep 22 23:22:23.990 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A6"
46440 Sep 22 23:22:23.990 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A6.db"
46441 Sep 22 23:22:23.990 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46442 Sep 22 23:22:23.990 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A6.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A6.completed"
46443 Sep 22 23:22:23.990 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46444 Sep 22 23:22:23.990 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46445 Sep 22 23:22:23.990 DEBG [0] It's time to notify for 502
46446 Sep 22 23:22:23.990 INFO Completion from [0] id:502 status:true
46447 Sep 22 23:22:23.990 INFO [503/752] Repair commands completed
46448 Sep 22 23:22:23.990 INFO Pop front: ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }, state: ClientData([New, New, New]) }
46449 Sep 22 23:22:23.990 INFO Sent repair work, now wait for resp
46450 Sep 22 23:22:23.990 INFO [0] received reconcile message
46451 Sep 22 23:22:23.990 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }, state: ClientData([InProgress, New, New]) }, : downstairs
46452 Sep 22 23:22:23.990 INFO [0] client ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }
46453 Sep 22 23:22:23.990 INFO [1] received reconcile message
46454 Sep 22 23:22:23.990 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46455 Sep 22 23:22:23.990 INFO [1] client ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }
46456 Sep 22 23:22:23.990 INFO [2] received reconcile message
46457 Sep 22 23:22:23.990 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46458 Sep 22 23:22:23.990 INFO [2] client ExtentReopen { repair_id: ReconciliationId(503), extent_id: 166 }
46459 Sep 22 23:22:23.991 DEBG 503 Reopen extent 166
46460 Sep 22 23:22:23.991 DEBG 503 Reopen extent 166
46461 Sep 22 23:22:23.992 DEBG 503 Reopen extent 166
46462 Sep 22 23:22:23.992 DEBG [2] It's time to notify for 503
46463 Sep 22 23:22:23.992 INFO Completion from [2] id:503 status:true
46464 Sep 22 23:22:23.992 INFO [504/752] Repair commands completed
46465 Sep 22 23:22:23.992 INFO Pop front: ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46466 Sep 22 23:22:23.992 INFO Sent repair work, now wait for resp
46467 Sep 22 23:22:23.992 INFO [0] received reconcile message
46468 Sep 22 23:22:23.992 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46469 Sep 22 23:22:23.992 INFO [0] client ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46470 Sep 22 23:22:23.993 INFO [1] received reconcile message
46471 Sep 22 23:22:23.993 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46472 Sep 22 23:22:23.993 INFO [1] client ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46473 Sep 22 23:22:23.993 INFO [2] received reconcile message
46474 Sep 22 23:22:23.993 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46475 Sep 22 23:22:23.993 INFO [2] client ExtentFlush { repair_id: ReconciliationId(504), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46476 Sep 22 23:22:23.993 DEBG 504 Flush extent 185 with f:2 g:2
46477 Sep 22 23:22:23.993 DEBG Flush just extent 185 with f:2 and g:2
46478 Sep 22 23:22:23.993 DEBG [1] It's time to notify for 504
46479 Sep 22 23:22:23.993 INFO Completion from [1] id:504 status:true
46480 Sep 22 23:22:23.993 INFO [505/752] Repair commands completed
46481 Sep 22 23:22:23.993 INFO Pop front: ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }, state: ClientData([New, New, New]) }
46482 Sep 22 23:22:23.993 INFO Sent repair work, now wait for resp
46483 Sep 22 23:22:23.993 INFO [0] received reconcile message
46484 Sep 22 23:22:23.993 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }, state: ClientData([InProgress, New, New]) }, : downstairs
46485 Sep 22 23:22:23.993 INFO [0] client ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }
46486 Sep 22 23:22:23.993 INFO [1] received reconcile message
46487 Sep 22 23:22:23.993 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46488 Sep 22 23:22:23.993 INFO [1] client ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }
46489 Sep 22 23:22:23.993 INFO [2] received reconcile message
46490 Sep 22 23:22:23.993 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46491 Sep 22 23:22:23.993 INFO [2] client ExtentClose { repair_id: ReconciliationId(505), extent_id: 185 }
46492 Sep 22 23:22:23.993 DEBG 505 Close extent 185
46493 Sep 22 23:22:23.994 DEBG 505 Close extent 185
46494 Sep 22 23:22:23.994 DEBG 505 Close extent 185
46495 Sep 22 23:22:23.994 DEBG [2] It's time to notify for 505
46496 Sep 22 23:22:23.994 INFO Completion from [2] id:505 status:true
46497 Sep 22 23:22:23.994 INFO [506/752] Repair commands completed
46498 Sep 22 23:22:23.994 INFO Pop front: ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46499 Sep 22 23:22:23.994 INFO Sent repair work, now wait for resp
46500 Sep 22 23:22:23.994 INFO [0] received reconcile message
46501 Sep 22 23:22:23.994 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46502 Sep 22 23:22:23.994 INFO [0] client ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46503 Sep 22 23:22:23.994 INFO [0] Sending repair request ReconciliationId(506)
46504 Sep 22 23:22:23.995 INFO [1] received reconcile message
46505 Sep 22 23:22:23.995 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46506 Sep 22 23:22:23.995 INFO [1] client ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46507 Sep 22 23:22:23.995 INFO [1] No action required ReconciliationId(506)
46508 Sep 22 23:22:23.995 INFO [2] received reconcile message
46509 Sep 22 23:22:23.995 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46510 Sep 22 23:22:23.995 INFO [2] client ExtentRepair { repair_id: ReconciliationId(506), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46511 Sep 22 23:22:23.995 INFO [2] No action required ReconciliationId(506)
46512 Sep 22 23:22:23.995 DEBG 506 Repair extent 185 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46513 Sep 22 23:22:23.995 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B9.copy"
46514 Sep 22 23:22:24.061 INFO accepted connection, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46515 Sep 22 23:22:24.061 TRCE incoming request, uri: /extent/185/files, method: GET, req_id: 79df7ec7-655d-4607-8f43-b981edf49c45, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46516 Sep 22 23:22:24.061 INFO request completed, latency_us: 249, response_code: 200, uri: /extent/185/files, method: GET, req_id: 79df7ec7-655d-4607-8f43-b981edf49c45, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46517 Sep 22 23:22:24.061 INFO eid:185 Found repair files: ["0B9", "0B9.db"]
46518 Sep 22 23:22:24.062 TRCE incoming request, uri: /newextent/185/data, method: GET, req_id: 186dfc66-2023-4f50-bc1d-0a358b585f8b, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46519 Sep 22 23:22:24.062 INFO request completed, latency_us: 352, response_code: 200, uri: /newextent/185/data, method: GET, req_id: 186dfc66-2023-4f50-bc1d-0a358b585f8b, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46520 Sep 22 23:22:24.067 TRCE incoming request, uri: /newextent/185/db, method: GET, req_id: c5e5bc20-675e-4d5c-a85f-1a882baa737c, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46521 Sep 22 23:22:24.067 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/185/db, method: GET, req_id: c5e5bc20-675e-4d5c-a85f-1a882baa737c, remote_addr: 127.0.0.1:59376, local_addr: 127.0.0.1:52864, task: repair
46522 Sep 22 23:22:24.068 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B9.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B9.replace"
46523 Sep 22 23:22:24.068 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46524 Sep 22 23:22:24.069 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B9.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46525 Sep 22 23:22:24.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B9"
46526 Sep 22 23:22:24.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B9.db"
46527 Sep 22 23:22:24.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46528 Sep 22 23:22:24.069 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B9.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B9.completed"
46529 Sep 22 23:22:24.069 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46530 Sep 22 23:22:24.070 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46531 Sep 22 23:22:24.070 DEBG [0] It's time to notify for 506
46532 Sep 22 23:22:24.070 INFO Completion from [0] id:506 status:true
46533 Sep 22 23:22:24.070 INFO [507/752] Repair commands completed
46534 Sep 22 23:22:24.070 INFO Pop front: ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }, state: ClientData([New, New, New]) }
46535 Sep 22 23:22:24.070 INFO Sent repair work, now wait for resp
46536 Sep 22 23:22:24.070 INFO [0] received reconcile message
46537 Sep 22 23:22:24.070 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }, state: ClientData([InProgress, New, New]) }, : downstairs
46538 Sep 22 23:22:24.070 INFO [0] client ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }
46539 Sep 22 23:22:24.070 INFO [1] received reconcile message
46540 Sep 22 23:22:24.070 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46541 Sep 22 23:22:24.070 INFO [1] client ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }
46542 Sep 22 23:22:24.070 INFO [2] received reconcile message
46543 Sep 22 23:22:24.070 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46544 Sep 22 23:22:24.070 INFO [2] client ExtentReopen { repair_id: ReconciliationId(507), extent_id: 185 }
46545 Sep 22 23:22:24.070 DEBG 507 Reopen extent 185
46546 Sep 22 23:22:24.071 DEBG 507 Reopen extent 185
46547 Sep 22 23:22:24.072 DEBG 507 Reopen extent 185
46548 Sep 22 23:22:24.072 DEBG [2] It's time to notify for 507
46549 Sep 22 23:22:24.072 INFO Completion from [2] id:507 status:true
46550 Sep 22 23:22:24.072 INFO [508/752] Repair commands completed
46551 Sep 22 23:22:24.072 INFO Pop front: ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46552 Sep 22 23:22:24.072 INFO Sent repair work, now wait for resp
46553 Sep 22 23:22:24.072 INFO [0] received reconcile message
46554 Sep 22 23:22:24.072 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46555 Sep 22 23:22:24.072 INFO [0] client ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46556 Sep 22 23:22:24.072 INFO [1] received reconcile message
46557 Sep 22 23:22:24.072 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46558 Sep 22 23:22:24.073 INFO [1] client ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46559 Sep 22 23:22:24.073 INFO [2] received reconcile message
46560 Sep 22 23:22:24.073 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46561 Sep 22 23:22:24.073 INFO [2] client ExtentFlush { repair_id: ReconciliationId(508), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46562 Sep 22 23:22:24.073 DEBG 508 Flush extent 86 with f:2 g:2
46563 Sep 22 23:22:24.073 DEBG Flush just extent 86 with f:2 and g:2
46564 Sep 22 23:22:24.073 DEBG [1] It's time to notify for 508
46565 Sep 22 23:22:24.073 INFO Completion from [1] id:508 status:true
46566 Sep 22 23:22:24.073 INFO [509/752] Repair commands completed
46567 Sep 22 23:22:24.073 INFO Pop front: ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }, state: ClientData([New, New, New]) }
46568 Sep 22 23:22:24.073 INFO Sent repair work, now wait for resp
46569 Sep 22 23:22:24.073 INFO [0] received reconcile message
46570 Sep 22 23:22:24.073 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }, state: ClientData([InProgress, New, New]) }, : downstairs
46571 Sep 22 23:22:24.073 INFO [0] client ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }
46572 Sep 22 23:22:24.073 INFO [1] received reconcile message
46573 Sep 22 23:22:24.073 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46574 Sep 22 23:22:24.073 INFO [1] client ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }
46575 Sep 22 23:22:24.073 INFO [2] received reconcile message
46576 Sep 22 23:22:24.073 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46577 Sep 22 23:22:24.073 INFO [2] client ExtentClose { repair_id: ReconciliationId(509), extent_id: 86 }
46578 Sep 22 23:22:24.073 DEBG 509 Close extent 86
46579 Sep 22 23:22:24.074 DEBG 509 Close extent 86
46580 Sep 22 23:22:24.074 DEBG 509 Close extent 86
46581 Sep 22 23:22:24.074 DEBG [2] It's time to notify for 509
46582 Sep 22 23:22:24.074 INFO Completion from [2] id:509 status:true
46583 Sep 22 23:22:24.074 INFO [510/752] Repair commands completed
46584 Sep 22 23:22:24.074 INFO Pop front: ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46585 Sep 22 23:22:24.074 INFO Sent repair work, now wait for resp
46586 Sep 22 23:22:24.074 INFO [0] received reconcile message
46587 Sep 22 23:22:24.074 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46588 Sep 22 23:22:24.074 INFO [0] client ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46589 Sep 22 23:22:24.074 INFO [0] Sending repair request ReconciliationId(510)
46590 Sep 22 23:22:24.075 INFO [1] received reconcile message
46591 Sep 22 23:22:24.075 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46592 Sep 22 23:22:24.075 INFO [1] client ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46593 Sep 22 23:22:24.075 INFO [1] No action required ReconciliationId(510)
46594 Sep 22 23:22:24.075 INFO [2] received reconcile message
46595 Sep 22 23:22:24.075 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46596 Sep 22 23:22:24.075 INFO [2] client ExtentRepair { repair_id: ReconciliationId(510), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46597 Sep 22 23:22:24.075 INFO [2] No action required ReconciliationId(510)
46598 Sep 22 23:22:24.075 DEBG 510 Repair extent 86 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46599 Sep 22 23:22:24.075 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/056.copy"
46600 Sep 22 23:22:24.138 INFO accepted connection, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46601 Sep 22 23:22:24.139 TRCE incoming request, uri: /extent/86/files, method: GET, req_id: ebe66f86-cf9a-4324-9ba1-07a32bc89b64, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46602 Sep 22 23:22:24.139 INFO request completed, latency_us: 207, response_code: 200, uri: /extent/86/files, method: GET, req_id: ebe66f86-cf9a-4324-9ba1-07a32bc89b64, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46603 Sep 22 23:22:24.139 INFO eid:86 Found repair files: ["056", "056.db"]
46604 Sep 22 23:22:24.139 TRCE incoming request, uri: /newextent/86/data, method: GET, req_id: 77642855-a184-4b7d-b8c0-6cbb8ee3bd46, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46605 Sep 22 23:22:24.140 INFO request completed, latency_us: 317, response_code: 200, uri: /newextent/86/data, method: GET, req_id: 77642855-a184-4b7d-b8c0-6cbb8ee3bd46, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46606 Sep 22 23:22:24.145 TRCE incoming request, uri: /newextent/86/db, method: GET, req_id: 749d84a4-1e3c-42b8-8a13-e650609ea139, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46607 Sep 22 23:22:24.145 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/86/db, method: GET, req_id: 749d84a4-1e3c-42b8-8a13-e650609ea139, remote_addr: 127.0.0.1:41753, local_addr: 127.0.0.1:52864, task: repair
46608 Sep 22 23:22:24.146 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/056.copy" to "/tmp/downstairs-zrMnlo6G/00/000/056.replace"
46609 Sep 22 23:22:24.146 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46610 Sep 22 23:22:24.147 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/056.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46611 Sep 22 23:22:24.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/056"
46612 Sep 22 23:22:24.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/056.db"
46613 Sep 22 23:22:24.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46614 Sep 22 23:22:24.147 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/056.replace" to "/tmp/downstairs-zrMnlo6G/00/000/056.completed"
46615 Sep 22 23:22:24.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46616 Sep 22 23:22:24.147 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46617 Sep 22 23:22:24.147 DEBG [0] It's time to notify for 510
46618 Sep 22 23:22:24.148 INFO Completion from [0] id:510 status:true
46619 Sep 22 23:22:24.148 INFO [511/752] Repair commands completed
46620 Sep 22 23:22:24.148 INFO Pop front: ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }, state: ClientData([New, New, New]) }
46621 Sep 22 23:22:24.148 INFO Sent repair work, now wait for resp
46622 Sep 22 23:22:24.148 INFO [0] received reconcile message
46623 Sep 22 23:22:24.148 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }, state: ClientData([InProgress, New, New]) }, : downstairs
46624 Sep 22 23:22:24.148 INFO [0] client ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }
46625 Sep 22 23:22:24.148 INFO [1] received reconcile message
46626 Sep 22 23:22:24.148 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46627 Sep 22 23:22:24.148 INFO [1] client ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }
46628 Sep 22 23:22:24.148 INFO [2] received reconcile message
46629 Sep 22 23:22:24.148 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46630 Sep 22 23:22:24.148 INFO [2] client ExtentReopen { repair_id: ReconciliationId(511), extent_id: 86 }
46631 Sep 22 23:22:24.148 DEBG 511 Reopen extent 86
46632 Sep 22 23:22:24.149 DEBG 511 Reopen extent 86
46633 Sep 22 23:22:24.149 DEBG 511 Reopen extent 86
46634 Sep 22 23:22:24.150 DEBG [2] It's time to notify for 511
46635 Sep 22 23:22:24.150 INFO Completion from [2] id:511 status:true
46636 Sep 22 23:22:24.150 INFO [512/752] Repair commands completed
46637 Sep 22 23:22:24.150 INFO Pop front: ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46638 Sep 22 23:22:24.150 INFO Sent repair work, now wait for resp
46639 Sep 22 23:22:24.150 INFO [0] received reconcile message
46640 Sep 22 23:22:24.150 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46641 Sep 22 23:22:24.150 INFO [0] client ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46642 Sep 22 23:22:24.150 INFO [1] received reconcile message
46643 Sep 22 23:22:24.150 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46644 Sep 22 23:22:24.150 INFO [1] client ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46645 Sep 22 23:22:24.150 INFO [2] received reconcile message
46646 Sep 22 23:22:24.150 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46647 Sep 22 23:22:24.150 INFO [2] client ExtentFlush { repair_id: ReconciliationId(512), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46648 Sep 22 23:22:24.150 DEBG 512 Flush extent 135 with f:2 g:2
46649 Sep 22 23:22:24.150 DEBG Flush just extent 135 with f:2 and g:2
46650 Sep 22 23:22:24.150 DEBG [1] It's time to notify for 512
46651 Sep 22 23:22:24.150 INFO Completion from [1] id:512 status:true
46652 Sep 22 23:22:24.150 INFO [513/752] Repair commands completed
46653 Sep 22 23:22:24.150 INFO Pop front: ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }, state: ClientData([New, New, New]) }
46654 Sep 22 23:22:24.150 INFO Sent repair work, now wait for resp
46655 Sep 22 23:22:24.151 INFO [0] received reconcile message
46656 Sep 22 23:22:24.151 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }, state: ClientData([InProgress, New, New]) }, : downstairs
46657 Sep 22 23:22:24.151 INFO [0] client ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }
46658 Sep 22 23:22:24.151 INFO [1] received reconcile message
46659 Sep 22 23:22:24.151 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46660 Sep 22 23:22:24.151 INFO [1] client ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }
46661 Sep 22 23:22:24.151 INFO [2] received reconcile message
46662 Sep 22 23:22:24.151 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46663 Sep 22 23:22:24.151 INFO [2] client ExtentClose { repair_id: ReconciliationId(513), extent_id: 135 }
46664 Sep 22 23:22:24.151 DEBG 513 Close extent 135
46665 Sep 22 23:22:24.151 DEBG 513 Close extent 135
46666 Sep 22 23:22:24.151 DEBG 513 Close extent 135
46667 Sep 22 23:22:24.152 DEBG [2] It's time to notify for 513
46668 Sep 22 23:22:24.152 INFO Completion from [2] id:513 status:true
46669 Sep 22 23:22:24.152 INFO [514/752] Repair commands completed
46670 Sep 22 23:22:24.152 INFO Pop front: ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46671 Sep 22 23:22:24.152 INFO Sent repair work, now wait for resp
46672 Sep 22 23:22:24.152 INFO [0] received reconcile message
46673 Sep 22 23:22:24.152 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46674 Sep 22 23:22:24.152 INFO [0] client ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46675 Sep 22 23:22:24.152 INFO [0] Sending repair request ReconciliationId(514)
46676 Sep 22 23:22:24.152 INFO [1] received reconcile message
46677 Sep 22 23:22:24.152 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46678 Sep 22 23:22:24.152 INFO [1] client ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46679 Sep 22 23:22:24.152 INFO [1] No action required ReconciliationId(514)
46680 Sep 22 23:22:24.152 INFO [2] received reconcile message
46681 Sep 22 23:22:24.152 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46682 Sep 22 23:22:24.152 INFO [2] client ExtentRepair { repair_id: ReconciliationId(514), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46683 Sep 22 23:22:24.152 INFO [2] No action required ReconciliationId(514)
46684 Sep 22 23:22:24.152 DEBG 514 Repair extent 135 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46685 Sep 22 23:22:24.152 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/087.copy"
46686 Sep 22 23:22:24.217 INFO accepted connection, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46687 Sep 22 23:22:24.217 TRCE incoming request, uri: /extent/135/files, method: GET, req_id: 47f8941a-a98a-4e7b-844b-c2ace649dba0, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46688 Sep 22 23:22:24.217 INFO request completed, latency_us: 220, response_code: 200, uri: /extent/135/files, method: GET, req_id: 47f8941a-a98a-4e7b-844b-c2ace649dba0, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46689 Sep 22 23:22:24.217 INFO eid:135 Found repair files: ["087", "087.db"]
46690 Sep 22 23:22:24.218 TRCE incoming request, uri: /newextent/135/data, method: GET, req_id: 05ec7851-46bd-4448-8e7f-599ec16aec8e, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46691 Sep 22 23:22:24.218 INFO request completed, latency_us: 266, response_code: 200, uri: /newextent/135/data, method: GET, req_id: 05ec7851-46bd-4448-8e7f-599ec16aec8e, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46692 Sep 22 23:22:24.223 TRCE incoming request, uri: /newextent/135/db, method: GET, req_id: 89bb2f99-4483-456e-abf4-684cde383111, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46693 Sep 22 23:22:24.223 INFO request completed, latency_us: 273, response_code: 200, uri: /newextent/135/db, method: GET, req_id: 89bb2f99-4483-456e-abf4-684cde383111, remote_addr: 127.0.0.1:45769, local_addr: 127.0.0.1:52864, task: repair
46694 Sep 22 23:22:24.225 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/087.copy" to "/tmp/downstairs-zrMnlo6G/00/000/087.replace"
46695 Sep 22 23:22:24.225 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46696 Sep 22 23:22:24.226 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/087.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46697 Sep 22 23:22:24.226 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/087"
46698 Sep 22 23:22:24.226 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/087.db"
46699 Sep 22 23:22:24.226 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46700 Sep 22 23:22:24.226 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/087.replace" to "/tmp/downstairs-zrMnlo6G/00/000/087.completed"
46701 Sep 22 23:22:24.226 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46702 Sep 22 23:22:24.226 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46703 Sep 22 23:22:24.226 DEBG [0] It's time to notify for 514
46704 Sep 22 23:22:24.226 INFO Completion from [0] id:514 status:true
46705 Sep 22 23:22:24.226 INFO [515/752] Repair commands completed
46706 Sep 22 23:22:24.226 INFO Pop front: ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }, state: ClientData([New, New, New]) }
46707 Sep 22 23:22:24.227 INFO Sent repair work, now wait for resp
46708 Sep 22 23:22:24.227 INFO [0] received reconcile message
46709 Sep 22 23:22:24.227 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }, state: ClientData([InProgress, New, New]) }, : downstairs
46710 Sep 22 23:22:24.227 INFO [0] client ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }
46711 Sep 22 23:22:24.227 INFO [1] received reconcile message
46712 Sep 22 23:22:24.227 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46713 Sep 22 23:22:24.227 INFO [1] client ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }
46714 Sep 22 23:22:24.227 INFO [2] received reconcile message
46715 Sep 22 23:22:24.227 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46716 Sep 22 23:22:24.227 INFO [2] client ExtentReopen { repair_id: ReconciliationId(515), extent_id: 135 }
46717 Sep 22 23:22:24.227 DEBG 515 Reopen extent 135
46718 Sep 22 23:22:24.228 DEBG 515 Reopen extent 135
46719 Sep 22 23:22:24.228 DEBG 515 Reopen extent 135
46720 Sep 22 23:22:24.229 DEBG [2] It's time to notify for 515
46721 Sep 22 23:22:24.229 INFO Completion from [2] id:515 status:true
46722 Sep 22 23:22:24.229 INFO [516/752] Repair commands completed
46723 Sep 22 23:22:24.229 INFO Pop front: ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46724 Sep 22 23:22:24.229 INFO Sent repair work, now wait for resp
46725 Sep 22 23:22:24.229 INFO [0] received reconcile message
46726 Sep 22 23:22:24.229 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46727 Sep 22 23:22:24.229 INFO [0] client ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46728 Sep 22 23:22:24.229 INFO [1] received reconcile message
46729 Sep 22 23:22:24.229 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46730 Sep 22 23:22:24.229 INFO [1] client ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46731 Sep 22 23:22:24.229 INFO [2] received reconcile message
46732 Sep 22 23:22:24.229 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46733 Sep 22 23:22:24.229 INFO [2] client ExtentFlush { repair_id: ReconciliationId(516), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46734 Sep 22 23:22:24.229 DEBG 516 Flush extent 13 with f:2 g:2
46735 Sep 22 23:22:24.229 DEBG Flush just extent 13 with f:2 and g:2
46736 Sep 22 23:22:24.229 DEBG [1] It's time to notify for 516
46737 Sep 22 23:22:24.229 INFO Completion from [1] id:516 status:true
46738 Sep 22 23:22:24.229 INFO [517/752] Repair commands completed
46739 Sep 22 23:22:24.229 INFO Pop front: ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }, state: ClientData([New, New, New]) }
46740 Sep 22 23:22:24.229 INFO Sent repair work, now wait for resp
46741 Sep 22 23:22:24.229 INFO [0] received reconcile message
46742 Sep 22 23:22:24.229 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }, state: ClientData([InProgress, New, New]) }, : downstairs
46743 Sep 22 23:22:24.229 INFO [0] client ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }
46744 Sep 22 23:22:24.230 INFO [1] received reconcile message
46745 Sep 22 23:22:24.230 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46746 Sep 22 23:22:24.230 INFO [1] client ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }
46747 Sep 22 23:22:24.230 INFO [2] received reconcile message
46748 Sep 22 23:22:24.230 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46749 Sep 22 23:22:24.230 INFO [2] client ExtentClose { repair_id: ReconciliationId(517), extent_id: 13 }
46750 Sep 22 23:22:24.230 DEBG 517 Close extent 13
46751 Sep 22 23:22:24.230 DEBG 517 Close extent 13
46752 Sep 22 23:22:24.230 DEBG 517 Close extent 13
46753 Sep 22 23:22:24.231 DEBG [2] It's time to notify for 517
46754 Sep 22 23:22:24.231 INFO Completion from [2] id:517 status:true
46755 Sep 22 23:22:24.231 INFO [518/752] Repair commands completed
46756 Sep 22 23:22:24.231 INFO Pop front: ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46757 Sep 22 23:22:24.231 INFO Sent repair work, now wait for resp
46758 Sep 22 23:22:24.231 INFO [0] received reconcile message
46759 Sep 22 23:22:24.231 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46760 Sep 22 23:22:24.231 INFO [0] client ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46761 Sep 22 23:22:24.231 INFO [0] Sending repair request ReconciliationId(518)
46762 Sep 22 23:22:24.231 INFO [1] received reconcile message
46763 Sep 22 23:22:24.231 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46764 Sep 22 23:22:24.231 INFO [1] client ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46765 Sep 22 23:22:24.231 INFO [1] No action required ReconciliationId(518)
46766 Sep 22 23:22:24.231 INFO [2] received reconcile message
46767 Sep 22 23:22:24.231 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46768 Sep 22 23:22:24.231 INFO [2] client ExtentRepair { repair_id: ReconciliationId(518), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46769 Sep 22 23:22:24.231 INFO [2] No action required ReconciliationId(518)
46770 Sep 22 23:22:24.231 DEBG 518 Repair extent 13 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46771 Sep 22 23:22:24.231 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/00D.copy"
46772 Sep 22 23:22:24.294 INFO accepted connection, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46773 Sep 22 23:22:24.294 TRCE incoming request, uri: /extent/13/files, method: GET, req_id: 66e50e4a-25af-4e93-9e9c-693963a83856, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46774 Sep 22 23:22:24.294 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/13/files, method: GET, req_id: 66e50e4a-25af-4e93-9e9c-693963a83856, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46775 Sep 22 23:22:24.294 INFO eid:13 Found repair files: ["00D", "00D.db"]
46776 Sep 22 23:22:24.295 TRCE incoming request, uri: /newextent/13/data, method: GET, req_id: 69b94a42-ffb2-4a3c-a4da-842d66aa8f9b, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46777 Sep 22 23:22:24.295 INFO request completed, latency_us: 270, response_code: 200, uri: /newextent/13/data, method: GET, req_id: 69b94a42-ffb2-4a3c-a4da-842d66aa8f9b, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46778 Sep 22 23:22:24.300 TRCE incoming request, uri: /newextent/13/db, method: GET, req_id: 733f1b24-65b4-4712-a39d-e45b8a863647, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46779 Sep 22 23:22:24.300 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/13/db, method: GET, req_id: 733f1b24-65b4-4712-a39d-e45b8a863647, remote_addr: 127.0.0.1:41702, local_addr: 127.0.0.1:52864, task: repair
46780 Sep 22 23:22:24.301 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/00D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/00D.replace"
46781 Sep 22 23:22:24.301 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46782 Sep 22 23:22:24.302 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/00D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46783 Sep 22 23:22:24.302 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00D"
46784 Sep 22 23:22:24.302 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00D.db"
46785 Sep 22 23:22:24.302 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46786 Sep 22 23:22:24.302 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/00D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/00D.completed"
46787 Sep 22 23:22:24.302 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46788 Sep 22 23:22:24.302 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46789 Sep 22 23:22:24.302 DEBG [0] It's time to notify for 518
46790 Sep 22 23:22:24.303 INFO Completion from [0] id:518 status:true
46791 Sep 22 23:22:24.303 INFO [519/752] Repair commands completed
46792 Sep 22 23:22:24.303 INFO Pop front: ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }, state: ClientData([New, New, New]) }
46793 Sep 22 23:22:24.303 INFO Sent repair work, now wait for resp
46794 Sep 22 23:22:24.303 INFO [0] received reconcile message
46795 Sep 22 23:22:24.303 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }, state: ClientData([InProgress, New, New]) }, : downstairs
46796 Sep 22 23:22:24.303 INFO [0] client ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }
46797 Sep 22 23:22:24.303 INFO [1] received reconcile message
46798 Sep 22 23:22:24.303 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46799 Sep 22 23:22:24.303 INFO [1] client ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }
46800 Sep 22 23:22:24.303 INFO [2] received reconcile message
46801 Sep 22 23:22:24.303 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46802 Sep 22 23:22:24.303 INFO [2] client ExtentReopen { repair_id: ReconciliationId(519), extent_id: 13 }
46803 Sep 22 23:22:24.303 DEBG 519 Reopen extent 13
46804 Sep 22 23:22:24.304 DEBG 519 Reopen extent 13
46805 Sep 22 23:22:24.304 DEBG 519 Reopen extent 13
46806 Sep 22 23:22:24.305 DEBG [2] It's time to notify for 519
46807 Sep 22 23:22:24.305 INFO Completion from [2] id:519 status:true
46808 Sep 22 23:22:24.305 INFO [520/752] Repair commands completed
46809 Sep 22 23:22:24.305 INFO Pop front: ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46810 Sep 22 23:22:24.305 INFO Sent repair work, now wait for resp
46811 Sep 22 23:22:24.305 INFO [0] received reconcile message
46812 Sep 22 23:22:24.305 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46813 Sep 22 23:22:24.305 INFO [0] client ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46814 Sep 22 23:22:24.305 INFO [1] received reconcile message
46815 Sep 22 23:22:24.305 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46816 Sep 22 23:22:24.305 INFO [1] client ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46817 Sep 22 23:22:24.305 INFO [2] received reconcile message
46818 Sep 22 23:22:24.305 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46819 Sep 22 23:22:24.305 INFO [2] client ExtentFlush { repair_id: ReconciliationId(520), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46820 Sep 22 23:22:24.305 DEBG 520 Flush extent 29 with f:2 g:2
46821 Sep 22 23:22:24.305 DEBG Flush just extent 29 with f:2 and g:2
46822 Sep 22 23:22:24.305 DEBG [1] It's time to notify for 520
46823 Sep 22 23:22:24.305 INFO Completion from [1] id:520 status:true
46824 Sep 22 23:22:24.306 INFO [521/752] Repair commands completed
46825 Sep 22 23:22:24.306 INFO Pop front: ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }, state: ClientData([New, New, New]) }
46826 Sep 22 23:22:24.306 INFO Sent repair work, now wait for resp
46827 Sep 22 23:22:24.306 INFO [0] received reconcile message
46828 Sep 22 23:22:24.306 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }, state: ClientData([InProgress, New, New]) }, : downstairs
46829 Sep 22 23:22:24.306 INFO [0] client ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }
46830 Sep 22 23:22:24.306 INFO [1] received reconcile message
46831 Sep 22 23:22:24.306 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46832 Sep 22 23:22:24.306 INFO [1] client ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }
46833 Sep 22 23:22:24.306 INFO [2] received reconcile message
46834 Sep 22 23:22:24.306 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46835 Sep 22 23:22:24.306 INFO [2] client ExtentClose { repair_id: ReconciliationId(521), extent_id: 29 }
46836 Sep 22 23:22:24.306 DEBG 521 Close extent 29
46837 Sep 22 23:22:24.306 DEBG 521 Close extent 29
46838 Sep 22 23:22:24.306 DEBG 521 Close extent 29
46839 Sep 22 23:22:24.307 DEBG [2] It's time to notify for 521
46840 Sep 22 23:22:24.307 INFO Completion from [2] id:521 status:true
46841 Sep 22 23:22:24.307 INFO [522/752] Repair commands completed
46842 Sep 22 23:22:24.307 INFO Pop front: ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46843 Sep 22 23:22:24.307 INFO Sent repair work, now wait for resp
46844 Sep 22 23:22:24.307 INFO [0] received reconcile message
46845 Sep 22 23:22:24.307 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46846 Sep 22 23:22:24.307 INFO [0] client ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46847 Sep 22 23:22:24.307 INFO [0] Sending repair request ReconciliationId(522)
46848 Sep 22 23:22:24.307 INFO [1] received reconcile message
46849 Sep 22 23:22:24.307 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46850 Sep 22 23:22:24.307 INFO [1] client ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46851 Sep 22 23:22:24.307 INFO [1] No action required ReconciliationId(522)
46852 Sep 22 23:22:24.307 INFO [2] received reconcile message
46853 Sep 22 23:22:24.307 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46854 Sep 22 23:22:24.307 INFO [2] client ExtentRepair { repair_id: ReconciliationId(522), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46855 Sep 22 23:22:24.307 INFO [2] No action required ReconciliationId(522)
46856 Sep 22 23:22:24.307 DEBG 522 Repair extent 29 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46857 Sep 22 23:22:24.307 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/01D.copy"
46858 Sep 22 23:22:24.314 ERRO [2] job id 1085 saw error GenericError("test error")
46859 Sep 22 23:22:24.314 ERRO [2] job id 1085 saw error GenericError("test error")
46860 Sep 22 23:22:24.320 DEBG up_ds_listen was notified
46861 Sep 22 23:22:24.320 DEBG up_ds_listen process 1088
46862 Sep 22 23:22:24.320 DEBG [A] ack job 1088:89, : downstairs
46863 Sep 22 23:22:24.320 DEBG up_ds_listen checked 1 jobs, back to waiting
46864 Sep 22 23:22:24.327 DEBG IO Flush 1090 has deps [JobId(1089), JobId(1088)]
46865 Sep 22 23:22:24.329 INFO [lossy] sleeping 1 second
46866 Sep 22 23:22:24.370 INFO accepted connection, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46867 Sep 22 23:22:24.370 TRCE incoming request, uri: /extent/29/files, method: GET, req_id: 5b24ac0d-6762-4c06-bdbc-9c46d0044fe1, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46868 Sep 22 23:22:24.370 INFO request completed, latency_us: 279, response_code: 200, uri: /extent/29/files, method: GET, req_id: 5b24ac0d-6762-4c06-bdbc-9c46d0044fe1, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46869 Sep 22 23:22:24.371 INFO eid:29 Found repair files: ["01D", "01D.db"]
46870 Sep 22 23:22:24.371 TRCE incoming request, uri: /newextent/29/data, method: GET, req_id: 6ea349d4-e9b2-406f-ae0b-65c5cfde5944, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46871 Sep 22 23:22:24.371 INFO request completed, latency_us: 376, response_code: 200, uri: /newextent/29/data, method: GET, req_id: 6ea349d4-e9b2-406f-ae0b-65c5cfde5944, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46872 Sep 22 23:22:24.377 TRCE incoming request, uri: /newextent/29/db, method: GET, req_id: 9833405e-3a0c-4085-8337-831aa02485cf, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46873 Sep 22 23:22:24.377 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/29/db, method: GET, req_id: 9833405e-3a0c-4085-8337-831aa02485cf, remote_addr: 127.0.0.1:58044, local_addr: 127.0.0.1:52864, task: repair
46874 Sep 22 23:22:24.378 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/01D.copy" to "/tmp/downstairs-zrMnlo6G/00/000/01D.replace"
46875 Sep 22 23:22:24.378 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46876 Sep 22 23:22:24.380 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/01D.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46877 Sep 22 23:22:24.380 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01D"
46878 Sep 22 23:22:24.380 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01D.db"
46879 Sep 22 23:22:24.380 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46880 Sep 22 23:22:24.380 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/01D.replace" to "/tmp/downstairs-zrMnlo6G/00/000/01D.completed"
46881 Sep 22 23:22:24.380 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46882 Sep 22 23:22:24.380 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46883 Sep 22 23:22:24.380 DEBG [0] It's time to notify for 522
46884 Sep 22 23:22:24.380 INFO Completion from [0] id:522 status:true
46885 Sep 22 23:22:24.381 INFO [523/752] Repair commands completed
46886 Sep 22 23:22:24.381 INFO Pop front: ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }, state: ClientData([New, New, New]) }
46887 Sep 22 23:22:24.381 INFO Sent repair work, now wait for resp
46888 Sep 22 23:22:24.381 INFO [0] received reconcile message
46889 Sep 22 23:22:24.381 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }, state: ClientData([InProgress, New, New]) }, : downstairs
46890 Sep 22 23:22:24.381 INFO [0] client ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }
46891 Sep 22 23:22:24.381 INFO [1] received reconcile message
46892 Sep 22 23:22:24.381 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46893 Sep 22 23:22:24.381 INFO [1] client ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }
46894 Sep 22 23:22:24.381 INFO [2] received reconcile message
46895 Sep 22 23:22:24.381 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46896 Sep 22 23:22:24.381 INFO [2] client ExtentReopen { repair_id: ReconciliationId(523), extent_id: 29 }
46897 Sep 22 23:22:24.381 DEBG 523 Reopen extent 29
46898 Sep 22 23:22:24.382 DEBG 523 Reopen extent 29
46899 Sep 22 23:22:24.382 DEBG 523 Reopen extent 29
46900 Sep 22 23:22:24.383 DEBG [2] It's time to notify for 523
46901 Sep 22 23:22:24.383 INFO Completion from [2] id:523 status:true
46902 Sep 22 23:22:24.383 INFO [524/752] Repair commands completed
46903 Sep 22 23:22:24.383 INFO Pop front: ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46904 Sep 22 23:22:24.383 INFO Sent repair work, now wait for resp
46905 Sep 22 23:22:24.383 INFO [0] received reconcile message
46906 Sep 22 23:22:24.383 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46907 Sep 22 23:22:24.383 INFO [0] client ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46908 Sep 22 23:22:24.383 INFO [1] received reconcile message
46909 Sep 22 23:22:24.383 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46910 Sep 22 23:22:24.383 INFO [1] client ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46911 Sep 22 23:22:24.383 INFO [2] received reconcile message
46912 Sep 22 23:22:24.383 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46913 Sep 22 23:22:24.383 INFO [2] client ExtentFlush { repair_id: ReconciliationId(524), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46914 Sep 22 23:22:24.383 DEBG 524 Flush extent 27 with f:2 g:2
46915 Sep 22 23:22:24.383 DEBG Flush just extent 27 with f:2 and g:2
46916 Sep 22 23:22:24.384 DEBG [1] It's time to notify for 524
46917 Sep 22 23:22:24.384 INFO Completion from [1] id:524 status:true
46918 Sep 22 23:22:24.384 INFO [525/752] Repair commands completed
46919 Sep 22 23:22:24.384 INFO Pop front: ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }, state: ClientData([New, New, New]) }
46920 Sep 22 23:22:24.384 INFO Sent repair work, now wait for resp
46921 Sep 22 23:22:24.384 INFO [0] received reconcile message
46922 Sep 22 23:22:24.384 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }, state: ClientData([InProgress, New, New]) }, : downstairs
46923 Sep 22 23:22:24.384 INFO [0] client ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }
46924 Sep 22 23:22:24.384 INFO [1] received reconcile message
46925 Sep 22 23:22:24.384 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46926 Sep 22 23:22:24.384 INFO [1] client ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }
46927 Sep 22 23:22:24.384 INFO [2] received reconcile message
46928 Sep 22 23:22:24.384 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46929 Sep 22 23:22:24.384 INFO [2] client ExtentClose { repair_id: ReconciliationId(525), extent_id: 27 }
46930 Sep 22 23:22:24.384 DEBG 525 Close extent 27
46931 Sep 22 23:22:24.384 DEBG 525 Close extent 27
46932 Sep 22 23:22:24.385 DEBG 525 Close extent 27
46933 Sep 22 23:22:24.385 DEBG [2] It's time to notify for 525
46934 Sep 22 23:22:24.385 INFO Completion from [2] id:525 status:true
46935 Sep 22 23:22:24.385 INFO [526/752] Repair commands completed
46936 Sep 22 23:22:24.385 INFO Pop front: ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46937 Sep 22 23:22:24.385 INFO Sent repair work, now wait for resp
46938 Sep 22 23:22:24.385 INFO [0] received reconcile message
46939 Sep 22 23:22:24.385 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46940 Sep 22 23:22:24.385 INFO [0] client ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46941 Sep 22 23:22:24.385 INFO [0] Sending repair request ReconciliationId(526)
46942 Sep 22 23:22:24.385 INFO [1] received reconcile message
46943 Sep 22 23:22:24.385 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46944 Sep 22 23:22:24.385 INFO [1] client ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46945 Sep 22 23:22:24.385 INFO [1] No action required ReconciliationId(526)
46946 Sep 22 23:22:24.385 INFO [2] received reconcile message
46947 Sep 22 23:22:24.385 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46948 Sep 22 23:22:24.385 INFO [2] client ExtentRepair { repair_id: ReconciliationId(526), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
46949 Sep 22 23:22:24.385 INFO [2] No action required ReconciliationId(526)
46950 Sep 22 23:22:24.386 DEBG 526 Repair extent 27 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
46951 Sep 22 23:22:24.386 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/01B.copy"
46952 Sep 22 23:22:24.451 INFO accepted connection, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46953 Sep 22 23:22:24.451 TRCE incoming request, uri: /extent/27/files, method: GET, req_id: 0bbb56be-851b-4d16-83b4-6920e2d4d8e9, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46954 Sep 22 23:22:24.451 INFO request completed, latency_us: 280, response_code: 200, uri: /extent/27/files, method: GET, req_id: 0bbb56be-851b-4d16-83b4-6920e2d4d8e9, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46955 Sep 22 23:22:24.451 INFO eid:27 Found repair files: ["01B", "01B.db"]
46956 Sep 22 23:22:24.452 TRCE incoming request, uri: /newextent/27/data, method: GET, req_id: 4675541f-33ad-4c07-b124-2ea7596493ba, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46957 Sep 22 23:22:24.452 INFO request completed, latency_us: 371, response_code: 200, uri: /newextent/27/data, method: GET, req_id: 4675541f-33ad-4c07-b124-2ea7596493ba, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46958 Sep 22 23:22:24.457 TRCE incoming request, uri: /newextent/27/db, method: GET, req_id: dd697937-a181-4f85-93cf-a1ae2707c48f, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46959 Sep 22 23:22:24.458 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/27/db, method: GET, req_id: dd697937-a181-4f85-93cf-a1ae2707c48f, remote_addr: 127.0.0.1:40549, local_addr: 127.0.0.1:52864, task: repair
46960 Sep 22 23:22:24.459 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/01B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/01B.replace"
46961 Sep 22 23:22:24.459 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46962 Sep 22 23:22:24.460 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/01B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
46963 Sep 22 23:22:24.460 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01B"
46964 Sep 22 23:22:24.460 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/01B.db"
46965 Sep 22 23:22:24.460 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46966 Sep 22 23:22:24.460 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/01B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/01B.completed"
46967 Sep 22 23:22:24.460 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46968 Sep 22 23:22:24.460 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
46969 Sep 22 23:22:24.461 DEBG [0] It's time to notify for 526
46970 Sep 22 23:22:24.461 INFO Completion from [0] id:526 status:true
46971 Sep 22 23:22:24.461 INFO [527/752] Repair commands completed
46972 Sep 22 23:22:24.461 INFO Pop front: ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }, state: ClientData([New, New, New]) }
46973 Sep 22 23:22:24.461 INFO Sent repair work, now wait for resp
46974 Sep 22 23:22:24.461 INFO [0] received reconcile message
46975 Sep 22 23:22:24.461 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }, state: ClientData([InProgress, New, New]) }, : downstairs
46976 Sep 22 23:22:24.461 INFO [0] client ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }
46977 Sep 22 23:22:24.461 INFO [1] received reconcile message
46978 Sep 22 23:22:24.461 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46979 Sep 22 23:22:24.461 INFO [1] client ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }
46980 Sep 22 23:22:24.461 INFO [2] received reconcile message
46981 Sep 22 23:22:24.461 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46982 Sep 22 23:22:24.461 INFO [2] client ExtentReopen { repair_id: ReconciliationId(527), extent_id: 27 }
46983 Sep 22 23:22:24.461 DEBG 527 Reopen extent 27
46984 Sep 22 23:22:24.462 DEBG 527 Reopen extent 27
46985 Sep 22 23:22:24.463 DEBG 527 Reopen extent 27
46986 Sep 22 23:22:24.463 DEBG [2] It's time to notify for 527
46987 Sep 22 23:22:24.463 INFO Completion from [2] id:527 status:true
46988 Sep 22 23:22:24.463 INFO [528/752] Repair commands completed
46989 Sep 22 23:22:24.463 INFO Pop front: ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46990 Sep 22 23:22:24.463 INFO Sent repair work, now wait for resp
46991 Sep 22 23:22:24.463 INFO [0] received reconcile message
46992 Sep 22 23:22:24.463 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46993 Sep 22 23:22:24.463 INFO [0] client ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46994 Sep 22 23:22:24.464 INFO [1] received reconcile message
46995 Sep 22 23:22:24.464 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46996 Sep 22 23:22:24.464 INFO [1] client ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46997 Sep 22 23:22:24.464 INFO [2] received reconcile message
46998 Sep 22 23:22:24.464 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46999 Sep 22 23:22:24.464 INFO [2] client ExtentFlush { repair_id: ReconciliationId(528), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47000 Sep 22 23:22:24.464 DEBG 528 Flush extent 187 with f:2 g:2
47001 Sep 22 23:22:24.464 DEBG Flush just extent 187 with f:2 and g:2
47002 Sep 22 23:22:24.464 DEBG [1] It's time to notify for 528
47003 Sep 22 23:22:24.464 INFO Completion from [1] id:528 status:true
47004 Sep 22 23:22:24.464 INFO [529/752] Repair commands completed
47005 Sep 22 23:22:24.464 INFO Pop front: ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }, state: ClientData([New, New, New]) }
47006 Sep 22 23:22:24.464 INFO Sent repair work, now wait for resp
47007 Sep 22 23:22:24.464 INFO [0] received reconcile message
47008 Sep 22 23:22:24.464 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }, state: ClientData([InProgress, New, New]) }, : downstairs
47009 Sep 22 23:22:24.464 INFO [0] client ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }
47010 Sep 22 23:22:24.464 INFO [1] received reconcile message
47011 Sep 22 23:22:24.464 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47012 Sep 22 23:22:24.464 INFO [1] client ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }
47013 Sep 22 23:22:24.464 INFO [2] received reconcile message
47014 Sep 22 23:22:24.464 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47015 Sep 22 23:22:24.464 INFO [2] client ExtentClose { repair_id: ReconciliationId(529), extent_id: 187 }
47016 Sep 22 23:22:24.464 DEBG 529 Close extent 187
47017 Sep 22 23:22:24.465 DEBG 529 Close extent 187
47018 Sep 22 23:22:24.465 DEBG 529 Close extent 187
47019 Sep 22 23:22:24.465 DEBG [2] It's time to notify for 529
47020 Sep 22 23:22:24.465 INFO Completion from [2] id:529 status:true
47021 Sep 22 23:22:24.465 INFO [530/752] Repair commands completed
47022 Sep 22 23:22:24.465 INFO Pop front: ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47023 Sep 22 23:22:24.465 INFO Sent repair work, now wait for resp
47024 Sep 22 23:22:24.465 INFO [0] received reconcile message
47025 Sep 22 23:22:24.465 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47026 Sep 22 23:22:24.465 INFO [0] client ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47027 Sep 22 23:22:24.466 INFO [0] Sending repair request ReconciliationId(530)
47028 Sep 22 23:22:24.466 INFO [1] received reconcile message
47029 Sep 22 23:22:24.466 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47030 Sep 22 23:22:24.466 INFO [1] client ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47031 Sep 22 23:22:24.466 INFO [1] No action required ReconciliationId(530)
47032 Sep 22 23:22:24.466 INFO [2] received reconcile message
47033 Sep 22 23:22:24.466 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47034 Sep 22 23:22:24.466 INFO [2] client ExtentRepair { repair_id: ReconciliationId(530), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47035 Sep 22 23:22:24.466 INFO [2] No action required ReconciliationId(530)
47036 Sep 22 23:22:24.466 DEBG 530 Repair extent 187 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47037 Sep 22 23:22:24.466 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0BB.copy"
47038 Sep 22 23:22:24.530 INFO accepted connection, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47039 Sep 22 23:22:24.530 TRCE incoming request, uri: /extent/187/files, method: GET, req_id: 6a5beff3-cbf4-4056-a311-fc7eb812ddb5, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47040 Sep 22 23:22:24.531 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/187/files, method: GET, req_id: 6a5beff3-cbf4-4056-a311-fc7eb812ddb5, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47041 Sep 22 23:22:24.531 INFO eid:187 Found repair files: ["0BB", "0BB.db"]
47042 Sep 22 23:22:24.531 TRCE incoming request, uri: /newextent/187/data, method: GET, req_id: 654d7456-cdad-459c-8377-e3ad0cd45a8e, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47043 Sep 22 23:22:24.531 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/187/data, method: GET, req_id: 654d7456-cdad-459c-8377-e3ad0cd45a8e, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47044 Sep 22 23:22:24.536 TRCE incoming request, uri: /newextent/187/db, method: GET, req_id: d0be47de-4df2-4e8b-a627-5f840b365d52, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47045 Sep 22 23:22:24.537 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/187/db, method: GET, req_id: d0be47de-4df2-4e8b-a627-5f840b365d52, remote_addr: 127.0.0.1:47833, local_addr: 127.0.0.1:52864, task: repair
47046 Sep 22 23:22:24.538 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0BB.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0BB.replace"
47047 Sep 22 23:22:24.538 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47048 Sep 22 23:22:24.538 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0BB.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47049 Sep 22 23:22:24.539 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0BB"
47050 Sep 22 23:22:24.539 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0BB.db"
47051 Sep 22 23:22:24.539 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47052 Sep 22 23:22:24.539 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0BB.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0BB.completed"
47053 Sep 22 23:22:24.539 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47054 Sep 22 23:22:24.539 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47055 Sep 22 23:22:24.539 DEBG [0] It's time to notify for 530
47056 Sep 22 23:22:24.539 INFO Completion from [0] id:530 status:true
47057 Sep 22 23:22:24.539 INFO [531/752] Repair commands completed
47058 Sep 22 23:22:24.539 INFO Pop front: ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }, state: ClientData([New, New, New]) }
47059 Sep 22 23:22:24.539 INFO Sent repair work, now wait for resp
47060 Sep 22 23:22:24.539 INFO [0] received reconcile message
47061 Sep 22 23:22:24.539 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }, state: ClientData([InProgress, New, New]) }, : downstairs
47062 Sep 22 23:22:24.539 INFO [0] client ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }
47063 Sep 22 23:22:24.539 INFO [1] received reconcile message
47064 Sep 22 23:22:24.539 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47065 Sep 22 23:22:24.539 INFO [1] client ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }
47066 Sep 22 23:22:24.539 INFO [2] received reconcile message
47067 Sep 22 23:22:24.540 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47068 Sep 22 23:22:24.540 INFO [2] client ExtentReopen { repair_id: ReconciliationId(531), extent_id: 187 }
47069 Sep 22 23:22:24.540 DEBG 531 Reopen extent 187
47070 Sep 22 23:22:24.540 DEBG 531 Reopen extent 187
47071 Sep 22 23:22:24.541 DEBG 531 Reopen extent 187
47072 Sep 22 23:22:24.541 DEBG [2] It's time to notify for 531
47073 Sep 22 23:22:24.541 INFO Completion from [2] id:531 status:true
47074 Sep 22 23:22:24.541 INFO [532/752] Repair commands completed
47075 Sep 22 23:22:24.541 INFO Pop front: ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47076 Sep 22 23:22:24.541 INFO Sent repair work, now wait for resp
47077 Sep 22 23:22:24.541 INFO [0] received reconcile message
47078 Sep 22 23:22:24.541 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47079 Sep 22 23:22:24.541 INFO [0] client ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47080 Sep 22 23:22:24.541 INFO [1] received reconcile message
47081 Sep 22 23:22:24.542 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47082 Sep 22 23:22:24.542 INFO [1] client ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47083 Sep 22 23:22:24.542 INFO [2] received reconcile message
47084 Sep 22 23:22:24.542 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47085 Sep 22 23:22:24.542 INFO [2] client ExtentFlush { repair_id: ReconciliationId(532), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47086 Sep 22 23:22:24.542 DEBG 532 Flush extent 10 with f:2 g:2
47087 Sep 22 23:22:24.542 DEBG Flush just extent 10 with f:2 and g:2
47088 Sep 22 23:22:24.542 DEBG [1] It's time to notify for 532
47089 Sep 22 23:22:24.542 INFO Completion from [1] id:532 status:true
47090 Sep 22 23:22:24.542 INFO [533/752] Repair commands completed
47091 Sep 22 23:22:24.542 INFO Pop front: ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }, state: ClientData([New, New, New]) }
47092 Sep 22 23:22:24.542 INFO Sent repair work, now wait for resp
47093 Sep 22 23:22:24.542 INFO [0] received reconcile message
47094 Sep 22 23:22:24.542 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }, state: ClientData([InProgress, New, New]) }, : downstairs
47095 Sep 22 23:22:24.542 INFO [0] client ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }
47096 Sep 22 23:22:24.542 INFO [1] received reconcile message
47097 Sep 22 23:22:24.542 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47098 Sep 22 23:22:24.542 INFO [1] client ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }
47099 Sep 22 23:22:24.542 INFO [2] received reconcile message
47100 Sep 22 23:22:24.542 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47101 Sep 22 23:22:24.542 INFO [2] client ExtentClose { repair_id: ReconciliationId(533), extent_id: 10 }
47102 Sep 22 23:22:24.542 DEBG 533 Close extent 10
47103 Sep 22 23:22:24.543 DEBG 533 Close extent 10
47104 Sep 22 23:22:24.543 DEBG 533 Close extent 10
47105 Sep 22 23:22:24.543 DEBG [2] It's time to notify for 533
47106 Sep 22 23:22:24.543 INFO Completion from [2] id:533 status:true
47107 Sep 22 23:22:24.543 INFO [534/752] Repair commands completed
47108 Sep 22 23:22:24.543 INFO Pop front: ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47109 Sep 22 23:22:24.543 INFO Sent repair work, now wait for resp
47110 Sep 22 23:22:24.543 INFO [0] received reconcile message
47111 Sep 22 23:22:24.543 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47112 Sep 22 23:22:24.543 INFO [0] client ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47113 Sep 22 23:22:24.543 INFO [0] Sending repair request ReconciliationId(534)
47114 Sep 22 23:22:24.544 INFO [1] received reconcile message
47115 Sep 22 23:22:24.544 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47116 Sep 22 23:22:24.544 INFO [1] client ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47117 Sep 22 23:22:24.544 INFO [1] No action required ReconciliationId(534)
47118 Sep 22 23:22:24.544 INFO [2] received reconcile message
47119 Sep 22 23:22:24.544 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47120 Sep 22 23:22:24.544 INFO [2] client ExtentRepair { repair_id: ReconciliationId(534), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47121 Sep 22 23:22:24.544 INFO [2] No action required ReconciliationId(534)
47122 Sep 22 23:22:24.544 DEBG 534 Repair extent 10 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47123 Sep 22 23:22:24.544 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/00A.copy"
47124 Sep 22 23:22:24.609 INFO accepted connection, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47125 Sep 22 23:22:24.609 TRCE incoming request, uri: /extent/10/files, method: GET, req_id: 0138adee-1167-4148-8bca-ac2c5a796e4d, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47126 Sep 22 23:22:24.610 INFO request completed, latency_us: 204, response_code: 200, uri: /extent/10/files, method: GET, req_id: 0138adee-1167-4148-8bca-ac2c5a796e4d, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47127 Sep 22 23:22:24.610 INFO eid:10 Found repair files: ["00A", "00A.db"]
47128 Sep 22 23:22:24.610 TRCE incoming request, uri: /newextent/10/data, method: GET, req_id: a193fb1d-9cb5-4ff9-9c71-1cc771a39bea, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47129 Sep 22 23:22:24.610 INFO request completed, latency_us: 310, response_code: 200, uri: /newextent/10/data, method: GET, req_id: a193fb1d-9cb5-4ff9-9c71-1cc771a39bea, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47130 Sep 22 23:22:24.615 TRCE incoming request, uri: /newextent/10/db, method: GET, req_id: d464a382-75d2-4a9c-9d2b-f257a9c17983, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47131 Sep 22 23:22:24.616 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/10/db, method: GET, req_id: d464a382-75d2-4a9c-9d2b-f257a9c17983, remote_addr: 127.0.0.1:36657, local_addr: 127.0.0.1:52864, task: repair
47132 Sep 22 23:22:24.617 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/00A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/00A.replace"
47133 Sep 22 23:22:24.617 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47134 Sep 22 23:22:24.617 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/00A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47135 Sep 22 23:22:24.618 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00A"
47136 Sep 22 23:22:24.618 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00A.db"
47137 Sep 22 23:22:24.618 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47138 Sep 22 23:22:24.618 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/00A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/00A.completed"
47139 Sep 22 23:22:24.618 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47140 Sep 22 23:22:24.618 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47141 Sep 22 23:22:24.618 DEBG [0] It's time to notify for 534
47142 Sep 22 23:22:24.618 INFO Completion from [0] id:534 status:true
47143 Sep 22 23:22:24.618 INFO [535/752] Repair commands completed
47144 Sep 22 23:22:24.618 INFO Pop front: ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }, state: ClientData([New, New, New]) }
47145 Sep 22 23:22:24.618 INFO Sent repair work, now wait for resp
47146 Sep 22 23:22:24.618 INFO [0] received reconcile message
47147 Sep 22 23:22:24.618 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }, state: ClientData([InProgress, New, New]) }, : downstairs
47148 Sep 22 23:22:24.618 INFO [0] client ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }
47149 Sep 22 23:22:24.618 INFO [1] received reconcile message
47150 Sep 22 23:22:24.618 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47151 Sep 22 23:22:24.618 INFO [1] client ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }
47152 Sep 22 23:22:24.619 INFO [2] received reconcile message
47153 Sep 22 23:22:24.619 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47154 Sep 22 23:22:24.619 INFO [2] client ExtentReopen { repair_id: ReconciliationId(535), extent_id: 10 }
47155 Sep 22 23:22:24.619 DEBG 535 Reopen extent 10
47156 Sep 22 23:22:24.619 DEBG 535 Reopen extent 10
47157 Sep 22 23:22:24.620 DEBG 535 Reopen extent 10
47158 Sep 22 23:22:24.620 DEBG [2] It's time to notify for 535
47159 Sep 22 23:22:24.620 INFO Completion from [2] id:535 status:true
47160 Sep 22 23:22:24.621 INFO [536/752] Repair commands completed
47161 Sep 22 23:22:24.621 INFO Pop front: ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47162 Sep 22 23:22:24.621 INFO Sent repair work, now wait for resp
47163 Sep 22 23:22:24.621 INFO [0] received reconcile message
47164 Sep 22 23:22:24.621 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47165 Sep 22 23:22:24.621 INFO [0] client ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47166 Sep 22 23:22:24.621 INFO [1] received reconcile message
47167 Sep 22 23:22:24.621 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47168 Sep 22 23:22:24.621 INFO [1] client ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47169 Sep 22 23:22:24.621 INFO [2] received reconcile message
47170 Sep 22 23:22:24.621 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47171 Sep 22 23:22:24.621 INFO [2] client ExtentFlush { repair_id: ReconciliationId(536), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47172 Sep 22 23:22:24.621 DEBG 536 Flush extent 102 with f:2 g:2
47173 Sep 22 23:22:24.621 DEBG Flush just extent 102 with f:2 and g:2
47174 Sep 22 23:22:24.621 DEBG [1] It's time to notify for 536
47175 Sep 22 23:22:24.621 INFO Completion from [1] id:536 status:true
47176 Sep 22 23:22:24.621 INFO [537/752] Repair commands completed
47177 Sep 22 23:22:24.621 INFO Pop front: ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }, state: ClientData([New, New, New]) }
47178 Sep 22 23:22:24.621 INFO Sent repair work, now wait for resp
47179 Sep 22 23:22:24.621 INFO [0] received reconcile message
47180 Sep 22 23:22:24.621 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }, state: ClientData([InProgress, New, New]) }, : downstairs
47181 Sep 22 23:22:24.621 INFO [0] client ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }
47182 Sep 22 23:22:24.621 INFO [1] received reconcile message
47183 Sep 22 23:22:24.621 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47184 Sep 22 23:22:24.621 INFO [1] client ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }
47185 Sep 22 23:22:24.621 INFO [2] received reconcile message
47186 Sep 22 23:22:24.621 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47187 Sep 22 23:22:24.621 INFO [2] client ExtentClose { repair_id: ReconciliationId(537), extent_id: 102 }
47188 Sep 22 23:22:24.622 DEBG 537 Close extent 102
47189 Sep 22 23:22:24.622 DEBG 537 Close extent 102
47190 Sep 22 23:22:24.622 DEBG 537 Close extent 102
47191 Sep 22 23:22:24.622 DEBG [2] It's time to notify for 537
47192 Sep 22 23:22:24.623 INFO Completion from [2] id:537 status:true
47193 Sep 22 23:22:24.623 INFO [538/752] Repair commands completed
47194 Sep 22 23:22:24.623 INFO Pop front: ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47195 Sep 22 23:22:24.623 INFO Sent repair work, now wait for resp
47196 Sep 22 23:22:24.623 INFO [0] received reconcile message
47197 Sep 22 23:22:24.623 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47198 Sep 22 23:22:24.623 INFO [0] client ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47199 Sep 22 23:22:24.623 INFO [0] Sending repair request ReconciliationId(538)
47200 Sep 22 23:22:24.623 INFO [1] received reconcile message
47201 Sep 22 23:22:24.623 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47202 Sep 22 23:22:24.623 INFO [1] client ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47203 Sep 22 23:22:24.623 INFO [1] No action required ReconciliationId(538)
47204 Sep 22 23:22:24.623 INFO [2] received reconcile message
47205 Sep 22 23:22:24.623 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47206 Sep 22 23:22:24.623 INFO [2] client ExtentRepair { repair_id: ReconciliationId(538), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47207 Sep 22 23:22:24.623 INFO [2] No action required ReconciliationId(538)
47208 Sep 22 23:22:24.623 DEBG 538 Repair extent 102 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47209 Sep 22 23:22:24.623 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/066.copy"
47210 Sep 22 23:22:24.687 INFO accepted connection, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47211 Sep 22 23:22:24.687 TRCE incoming request, uri: /extent/102/files, method: GET, req_id: 652d8c9c-1f13-4868-8cae-847f70b5d42f, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47212 Sep 22 23:22:24.687 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/102/files, method: GET, req_id: 652d8c9c-1f13-4868-8cae-847f70b5d42f, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47213 Sep 22 23:22:24.688 INFO eid:102 Found repair files: ["066", "066.db"]
47214 Sep 22 23:22:24.688 TRCE incoming request, uri: /newextent/102/data, method: GET, req_id: 4318ae0c-e308-4667-b91e-eeeaaaef7ef5, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47215 Sep 22 23:22:24.688 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/102/data, method: GET, req_id: 4318ae0c-e308-4667-b91e-eeeaaaef7ef5, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47216 Sep 22 23:22:24.693 TRCE incoming request, uri: /newextent/102/db, method: GET, req_id: 7058aed6-2f4d-4842-a128-4e21b6f2e1fe, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47217 Sep 22 23:22:24.693 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/102/db, method: GET, req_id: 7058aed6-2f4d-4842-a128-4e21b6f2e1fe, remote_addr: 127.0.0.1:60465, local_addr: 127.0.0.1:52864, task: repair
47218 Sep 22 23:22:24.694 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/066.copy" to "/tmp/downstairs-zrMnlo6G/00/000/066.replace"
47219 Sep 22 23:22:24.694 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47220 Sep 22 23:22:24.695 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/066.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47221 Sep 22 23:22:24.695 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/066"
47222 Sep 22 23:22:24.695 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/066.db"
47223 Sep 22 23:22:24.695 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47224 Sep 22 23:22:24.695 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/066.replace" to "/tmp/downstairs-zrMnlo6G/00/000/066.completed"
47225 Sep 22 23:22:24.696 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47226 Sep 22 23:22:24.696 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47227 Sep 22 23:22:24.696 DEBG [0] It's time to notify for 538
47228 Sep 22 23:22:24.696 INFO Completion from [0] id:538 status:true
47229 Sep 22 23:22:24.696 INFO [539/752] Repair commands completed
47230 Sep 22 23:22:24.696 INFO Pop front: ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }, state: ClientData([New, New, New]) }
47231 Sep 22 23:22:24.696 INFO Sent repair work, now wait for resp
47232 Sep 22 23:22:24.696 INFO [0] received reconcile message
47233 Sep 22 23:22:24.696 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }, state: ClientData([InProgress, New, New]) }, : downstairs
47234 Sep 22 23:22:24.696 INFO [0] client ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }
47235 Sep 22 23:22:24.696 INFO [1] received reconcile message
47236 Sep 22 23:22:24.696 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47237 Sep 22 23:22:24.696 INFO [1] client ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }
47238 Sep 22 23:22:24.696 INFO [2] received reconcile message
47239 Sep 22 23:22:24.696 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47240 Sep 22 23:22:24.696 INFO [2] client ExtentReopen { repair_id: ReconciliationId(539), extent_id: 102 }
47241 Sep 22 23:22:24.696 DEBG 539 Reopen extent 102
47242 Sep 22 23:22:24.697 DEBG 539 Reopen extent 102
47243 Sep 22 23:22:24.698 DEBG 539 Reopen extent 102
47244 Sep 22 23:22:24.698 DEBG [2] It's time to notify for 539
47245 Sep 22 23:22:24.698 INFO Completion from [2] id:539 status:true
47246 Sep 22 23:22:24.698 INFO [540/752] Repair commands completed
47247 Sep 22 23:22:24.698 INFO Pop front: ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47248 Sep 22 23:22:24.698 INFO Sent repair work, now wait for resp
47249 Sep 22 23:22:24.698 INFO [0] received reconcile message
47250 Sep 22 23:22:24.698 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47251 Sep 22 23:22:24.698 INFO [0] client ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47252 Sep 22 23:22:24.698 INFO [1] received reconcile message
47253 Sep 22 23:22:24.698 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47254 Sep 22 23:22:24.698 INFO [1] client ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47255 Sep 22 23:22:24.698 INFO [2] received reconcile message
47256 Sep 22 23:22:24.698 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47257 Sep 22 23:22:24.698 INFO [2] client ExtentFlush { repair_id: ReconciliationId(540), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47258 Sep 22 23:22:24.699 DEBG 540 Flush extent 85 with f:2 g:2
47259 Sep 22 23:22:24.699 DEBG Flush just extent 85 with f:2 and g:2
47260 Sep 22 23:22:24.699 DEBG [1] It's time to notify for 540
47261 Sep 22 23:22:24.699 INFO Completion from [1] id:540 status:true
47262 Sep 22 23:22:24.699 INFO [541/752] Repair commands completed
47263 Sep 22 23:22:24.699 INFO Pop front: ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }, state: ClientData([New, New, New]) }
47264 Sep 22 23:22:24.699 INFO Sent repair work, now wait for resp
47265 Sep 22 23:22:24.699 INFO [0] received reconcile message
47266 Sep 22 23:22:24.699 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }, state: ClientData([InProgress, New, New]) }, : downstairs
47267 Sep 22 23:22:24.699 INFO [0] client ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }
47268 Sep 22 23:22:24.699 INFO [1] received reconcile message
47269 Sep 22 23:22:24.699 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47270 Sep 22 23:22:24.699 INFO [1] client ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }
47271 Sep 22 23:22:24.699 INFO [2] received reconcile message
47272 Sep 22 23:22:24.699 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47273 Sep 22 23:22:24.699 INFO [2] client ExtentClose { repair_id: ReconciliationId(541), extent_id: 85 }
47274 Sep 22 23:22:24.699 DEBG 541 Close extent 85
47275 Sep 22 23:22:24.699 DEBG 541 Close extent 85
47276 Sep 22 23:22:24.700 DEBG 541 Close extent 85
47277 Sep 22 23:22:24.700 DEBG [2] It's time to notify for 541
47278 Sep 22 23:22:24.700 INFO Completion from [2] id:541 status:true
47279 Sep 22 23:22:24.700 INFO [542/752] Repair commands completed
47280 Sep 22 23:22:24.700 INFO Pop front: ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47281 Sep 22 23:22:24.700 INFO Sent repair work, now wait for resp
47282 Sep 22 23:22:24.700 INFO [0] received reconcile message
47283 Sep 22 23:22:24.700 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47284 Sep 22 23:22:24.700 INFO [0] client ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47285 Sep 22 23:22:24.700 INFO [0] Sending repair request ReconciliationId(542)
47286 Sep 22 23:22:24.700 INFO [1] received reconcile message
47287 Sep 22 23:22:24.700 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47288 Sep 22 23:22:24.700 INFO [1] client ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47289 Sep 22 23:22:24.700 INFO [1] No action required ReconciliationId(542)
47290 Sep 22 23:22:24.700 INFO [2] received reconcile message
47291 Sep 22 23:22:24.701 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47292 Sep 22 23:22:24.701 INFO [2] client ExtentRepair { repair_id: ReconciliationId(542), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47293 Sep 22 23:22:24.701 INFO [2] No action required ReconciliationId(542)
47294 Sep 22 23:22:24.701 DEBG 542 Repair extent 85 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47295 Sep 22 23:22:24.701 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/055.copy"
47296 Sep 22 23:22:24.711 INFO [lossy] skipping 1089
47297 Sep 22 23:22:24.711 INFO [lossy] skipping 1090
47298 Sep 22 23:22:24.716 DEBG Read :1089 deps:[JobId(1088)] res:true
47299 Sep 22 23:22:24.739 WARN returning error on flush!
47300 Sep 22 23:22:24.739 DEBG Flush :1090 extent_limit None deps:[JobId(1089), JobId(1088)] res:false f:34 g:1
47301 Sep 22 23:22:24.739 INFO [lossy] skipping 1090
47302 Sep 22 23:22:24.739 DEBG Flush :1090 extent_limit None deps:[JobId(1089), JobId(1088)] res:true f:34 g:1
47303 Sep 22 23:22:24.739 INFO [lossy] sleeping 1 second
47304 Sep 22 23:22:24.766 INFO accepted connection, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47305 Sep 22 23:22:24.767 TRCE incoming request, uri: /extent/85/files, method: GET, req_id: 66ad53f4-4822-41c4-a1fd-daef5bd37365, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47306 Sep 22 23:22:24.767 INFO request completed, latency_us: 224, response_code: 200, uri: /extent/85/files, method: GET, req_id: 66ad53f4-4822-41c4-a1fd-daef5bd37365, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47307 Sep 22 23:22:24.767 INFO eid:85 Found repair files: ["055", "055.db"]
47308 Sep 22 23:22:24.767 TRCE incoming request, uri: /newextent/85/data, method: GET, req_id: 867be027-70ed-4524-8e97-3317cb147004, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47309 Sep 22 23:22:24.768 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/85/data, method: GET, req_id: 867be027-70ed-4524-8e97-3317cb147004, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47310 Sep 22 23:22:24.772 TRCE incoming request, uri: /newextent/85/db, method: GET, req_id: 9f388412-8208-4b14-8a1f-dba266bfb627, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47311 Sep 22 23:22:24.773 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/85/db, method: GET, req_id: 9f388412-8208-4b14-8a1f-dba266bfb627, remote_addr: 127.0.0.1:60551, local_addr: 127.0.0.1:52864, task: repair
47312 Sep 22 23:22:24.774 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/055.copy" to "/tmp/downstairs-zrMnlo6G/00/000/055.replace"
47313 Sep 22 23:22:24.774 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47314 Sep 22 23:22:24.775 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/055.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47315 Sep 22 23:22:24.775 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/055"
47316 Sep 22 23:22:24.775 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/055.db"
47317 Sep 22 23:22:24.775 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47318 Sep 22 23:22:24.775 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/055.replace" to "/tmp/downstairs-zrMnlo6G/00/000/055.completed"
47319 Sep 22 23:22:24.775 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47320 Sep 22 23:22:24.775 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47321 Sep 22 23:22:24.775 DEBG [0] It's time to notify for 542
47322 Sep 22 23:22:24.776 INFO Completion from [0] id:542 status:true
47323 Sep 22 23:22:24.776 INFO [543/752] Repair commands completed
47324 Sep 22 23:22:24.776 INFO Pop front: ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }, state: ClientData([New, New, New]) }
47325 Sep 22 23:22:24.776 INFO Sent repair work, now wait for resp
47326 Sep 22 23:22:24.776 INFO [0] received reconcile message
47327 Sep 22 23:22:24.776 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }, state: ClientData([InProgress, New, New]) }, : downstairs
47328 Sep 22 23:22:24.776 INFO [0] client ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }
47329 Sep 22 23:22:24.776 INFO [1] received reconcile message
47330 Sep 22 23:22:24.776 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47331 Sep 22 23:22:24.776 INFO [1] client ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }
47332 Sep 22 23:22:24.776 INFO [2] received reconcile message
47333 Sep 22 23:22:24.776 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47334 Sep 22 23:22:24.776 INFO [2] client ExtentReopen { repair_id: ReconciliationId(543), extent_id: 85 }
47335 Sep 22 23:22:24.776 DEBG 543 Reopen extent 85
47336 Sep 22 23:22:24.777 DEBG 543 Reopen extent 85
47337 Sep 22 23:22:24.777 DEBG 543 Reopen extent 85
47338 Sep 22 23:22:24.778 DEBG [2] It's time to notify for 543
47339 Sep 22 23:22:24.778 INFO Completion from [2] id:543 status:true
47340 Sep 22 23:22:24.778 INFO [544/752] Repair commands completed
47341 Sep 22 23:22:24.778 INFO Pop front: ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47342 Sep 22 23:22:24.778 INFO Sent repair work, now wait for resp
47343 Sep 22 23:22:24.778 INFO [0] received reconcile message
47344 Sep 22 23:22:24.778 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47345 Sep 22 23:22:24.778 INFO [0] client ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47346 Sep 22 23:22:24.778 INFO [1] received reconcile message
47347 Sep 22 23:22:24.778 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47348 Sep 22 23:22:24.778 INFO [1] client ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47349 Sep 22 23:22:24.778 INFO [2] received reconcile message
47350 Sep 22 23:22:24.778 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47351 Sep 22 23:22:24.778 INFO [2] client ExtentFlush { repair_id: ReconciliationId(544), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47352 Sep 22 23:22:24.778 DEBG 544 Flush extent 100 with f:2 g:2
47353 Sep 22 23:22:24.778 DEBG Flush just extent 100 with f:2 and g:2
47354 Sep 22 23:22:24.778 DEBG [1] It's time to notify for 544
47355 Sep 22 23:22:24.778 INFO Completion from [1] id:544 status:true
47356 Sep 22 23:22:24.778 INFO [545/752] Repair commands completed
47357 Sep 22 23:22:24.778 INFO Pop front: ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }, state: ClientData([New, New, New]) }
47358 Sep 22 23:22:24.778 INFO Sent repair work, now wait for resp
47359 Sep 22 23:22:24.778 INFO [0] received reconcile message
47360 Sep 22 23:22:24.779 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }, state: ClientData([InProgress, New, New]) }, : downstairs
47361 Sep 22 23:22:24.779 INFO [0] client ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }
47362 Sep 22 23:22:24.779 INFO [1] received reconcile message
47363 Sep 22 23:22:24.779 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47364 Sep 22 23:22:24.779 INFO [1] client ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }
47365 Sep 22 23:22:24.779 INFO [2] received reconcile message
47366 Sep 22 23:22:24.779 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47367 Sep 22 23:22:24.779 INFO [2] client ExtentClose { repair_id: ReconciliationId(545), extent_id: 100 }
47368 Sep 22 23:22:24.779 DEBG 545 Close extent 100
47369 Sep 22 23:22:24.779 DEBG 545 Close extent 100
47370 Sep 22 23:22:24.779 DEBG 545 Close extent 100
47371 Sep 22 23:22:24.780 DEBG [2] It's time to notify for 545
47372 Sep 22 23:22:24.780 INFO Completion from [2] id:545 status:true
47373 Sep 22 23:22:24.780 INFO [546/752] Repair commands completed
47374 Sep 22 23:22:24.780 INFO Pop front: ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47375 Sep 22 23:22:24.780 INFO Sent repair work, now wait for resp
47376 Sep 22 23:22:24.780 INFO [0] received reconcile message
47377 Sep 22 23:22:24.780 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47378 Sep 22 23:22:24.780 INFO [0] client ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47379 Sep 22 23:22:24.780 INFO [0] Sending repair request ReconciliationId(546)
47380 Sep 22 23:22:24.780 INFO [1] received reconcile message
47381 Sep 22 23:22:24.780 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47382 Sep 22 23:22:24.780 INFO [1] client ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47383 Sep 22 23:22:24.780 INFO [1] No action required ReconciliationId(546)
47384 Sep 22 23:22:24.780 INFO [2] received reconcile message
47385 Sep 22 23:22:24.780 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47386 Sep 22 23:22:24.780 INFO [2] client ExtentRepair { repair_id: ReconciliationId(546), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47387 Sep 22 23:22:24.780 INFO [2] No action required ReconciliationId(546)
47388 Sep 22 23:22:24.780 DEBG 546 Repair extent 100 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47389 Sep 22 23:22:24.780 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/064.copy"
47390 Sep 22 23:22:24.844 INFO accepted connection, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47391 Sep 22 23:22:24.845 TRCE incoming request, uri: /extent/100/files, method: GET, req_id: 54454a0e-21a2-4b20-9093-d83bf40e8d5c, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47392 Sep 22 23:22:24.845 INFO request completed, latency_us: 224, response_code: 200, uri: /extent/100/files, method: GET, req_id: 54454a0e-21a2-4b20-9093-d83bf40e8d5c, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47393 Sep 22 23:22:24.845 INFO eid:100 Found repair files: ["064", "064.db"]
47394 Sep 22 23:22:24.845 TRCE incoming request, uri: /newextent/100/data, method: GET, req_id: 72b862f8-2500-4e85-b2fc-7d69bb1653c4, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47395 Sep 22 23:22:24.846 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/100/data, method: GET, req_id: 72b862f8-2500-4e85-b2fc-7d69bb1653c4, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47396 Sep 22 23:22:24.851 TRCE incoming request, uri: /newextent/100/db, method: GET, req_id: d50e9d04-de69-40e0-92ec-ec3e5391f5d5, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47397 Sep 22 23:22:24.851 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/100/db, method: GET, req_id: d50e9d04-de69-40e0-92ec-ec3e5391f5d5, remote_addr: 127.0.0.1:37092, local_addr: 127.0.0.1:52864, task: repair
47398 Sep 22 23:22:24.852 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/064.copy" to "/tmp/downstairs-zrMnlo6G/00/000/064.replace"
47399 Sep 22 23:22:24.852 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47400 Sep 22 23:22:24.853 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/064.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47401 Sep 22 23:22:24.853 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/064"
47402 Sep 22 23:22:24.853 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/064.db"
47403 Sep 22 23:22:24.853 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47404 Sep 22 23:22:24.853 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/064.replace" to "/tmp/downstairs-zrMnlo6G/00/000/064.completed"
47405 Sep 22 23:22:24.853 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47406 Sep 22 23:22:24.853 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47407 Sep 22 23:22:24.854 DEBG [0] It's time to notify for 546
47408 Sep 22 23:22:24.854 INFO Completion from [0] id:546 status:true
47409 Sep 22 23:22:24.854 INFO [547/752] Repair commands completed
47410 Sep 22 23:22:24.854 INFO Pop front: ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }, state: ClientData([New, New, New]) }
47411 Sep 22 23:22:24.854 INFO Sent repair work, now wait for resp
47412 Sep 22 23:22:24.854 INFO [0] received reconcile message
47413 Sep 22 23:22:24.854 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }, state: ClientData([InProgress, New, New]) }, : downstairs
47414 Sep 22 23:22:24.854 INFO [0] client ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }
47415 Sep 22 23:22:24.854 INFO [1] received reconcile message
47416 Sep 22 23:22:24.854 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47417 Sep 22 23:22:24.854 INFO [1] client ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }
47418 Sep 22 23:22:24.854 INFO [2] received reconcile message
47419 Sep 22 23:22:24.854 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47420 Sep 22 23:22:24.854 INFO [2] client ExtentReopen { repair_id: ReconciliationId(547), extent_id: 100 }
47421 Sep 22 23:22:24.854 DEBG 547 Reopen extent 100
47422 Sep 22 23:22:24.855 DEBG 547 Reopen extent 100
47423 Sep 22 23:22:24.855 DEBG 547 Reopen extent 100
47424 Sep 22 23:22:24.856 DEBG [2] It's time to notify for 547
47425 Sep 22 23:22:24.856 INFO Completion from [2] id:547 status:true
47426 Sep 22 23:22:24.856 INFO [548/752] Repair commands completed
47427 Sep 22 23:22:24.856 INFO Pop front: ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47428 Sep 22 23:22:24.856 INFO Sent repair work, now wait for resp
47429 Sep 22 23:22:24.856 INFO [0] received reconcile message
47430 Sep 22 23:22:24.856 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47431 Sep 22 23:22:24.856 INFO [0] client ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47432 Sep 22 23:22:24.856 INFO [1] received reconcile message
47433 Sep 22 23:22:24.856 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47434 Sep 22 23:22:24.856 INFO [1] client ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47435 Sep 22 23:22:24.856 INFO [2] received reconcile message
47436 Sep 22 23:22:24.856 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47437 Sep 22 23:22:24.856 INFO [2] client ExtentFlush { repair_id: ReconciliationId(548), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47438 Sep 22 23:22:24.856 DEBG 548 Flush extent 95 with f:2 g:2
47439 Sep 22 23:22:24.857 DEBG Flush just extent 95 with f:2 and g:2
47440 Sep 22 23:22:24.857 DEBG [1] It's time to notify for 548
47441 Sep 22 23:22:24.857 INFO Completion from [1] id:548 status:true
47442 Sep 22 23:22:24.857 INFO [549/752] Repair commands completed
47443 Sep 22 23:22:24.857 INFO Pop front: ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }, state: ClientData([New, New, New]) }
47444 Sep 22 23:22:24.857 INFO Sent repair work, now wait for resp
47445 Sep 22 23:22:24.857 INFO [0] received reconcile message
47446 Sep 22 23:22:24.857 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }, state: ClientData([InProgress, New, New]) }, : downstairs
47447 Sep 22 23:22:24.857 INFO [0] client ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }
47448 Sep 22 23:22:24.857 INFO [1] received reconcile message
47449 Sep 22 23:22:24.857 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47450 Sep 22 23:22:24.857 INFO [1] client ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }
47451 Sep 22 23:22:24.857 INFO [2] received reconcile message
47452 Sep 22 23:22:24.857 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47453 Sep 22 23:22:24.857 INFO [2] client ExtentClose { repair_id: ReconciliationId(549), extent_id: 95 }
47454 Sep 22 23:22:24.857 DEBG 549 Close extent 95
47455 Sep 22 23:22:24.857 DEBG 549 Close extent 95
47456 Sep 22 23:22:24.858 DEBG 549 Close extent 95
47457 Sep 22 23:22:24.858 DEBG [2] It's time to notify for 549
47458 Sep 22 23:22:24.858 INFO Completion from [2] id:549 status:true
47459 Sep 22 23:22:24.858 INFO [550/752] Repair commands completed
47460 Sep 22 23:22:24.858 INFO Pop front: ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47461 Sep 22 23:22:24.858 INFO Sent repair work, now wait for resp
47462 Sep 22 23:22:24.858 INFO [0] received reconcile message
47463 Sep 22 23:22:24.858 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47464 Sep 22 23:22:24.858 INFO [0] client ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47465 Sep 22 23:22:24.858 INFO [0] Sending repair request ReconciliationId(550)
47466 Sep 22 23:22:24.858 INFO [1] received reconcile message
47467 Sep 22 23:22:24.858 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47468 Sep 22 23:22:24.858 INFO [1] client ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47469 Sep 22 23:22:24.858 INFO [1] No action required ReconciliationId(550)
47470 Sep 22 23:22:24.858 INFO [2] received reconcile message
47471 Sep 22 23:22:24.858 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47472 Sep 22 23:22:24.858 INFO [2] client ExtentRepair { repair_id: ReconciliationId(550), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47473 Sep 22 23:22:24.858 INFO [2] No action required ReconciliationId(550)
47474 Sep 22 23:22:24.859 DEBG 550 Repair extent 95 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47475 Sep 22 23:22:24.859 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/05F.copy"
47476 Sep 22 23:22:24.921 INFO accepted connection, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47477 Sep 22 23:22:24.922 TRCE incoming request, uri: /extent/95/files, method: GET, req_id: 72ac2774-d5d9-4530-84c1-f9f9a186815f, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47478 Sep 22 23:22:24.922 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/95/files, method: GET, req_id: 72ac2774-d5d9-4530-84c1-f9f9a186815f, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47479 Sep 22 23:22:24.922 INFO eid:95 Found repair files: ["05F", "05F.db"]
47480 Sep 22 23:22:24.922 TRCE incoming request, uri: /newextent/95/data, method: GET, req_id: 0f8c1b99-58a5-471f-b1e8-6c7764780bbc, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47481 Sep 22 23:22:24.923 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/95/data, method: GET, req_id: 0f8c1b99-58a5-471f-b1e8-6c7764780bbc, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47482 Sep 22 23:22:24.927 TRCE incoming request, uri: /newextent/95/db, method: GET, req_id: b9d6ff52-eec9-47df-88a5-278332912e22, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47483 Sep 22 23:22:24.928 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/95/db, method: GET, req_id: b9d6ff52-eec9-47df-88a5-278332912e22, remote_addr: 127.0.0.1:40895, local_addr: 127.0.0.1:52864, task: repair
47484 Sep 22 23:22:24.929 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/05F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/05F.replace"
47485 Sep 22 23:22:24.929 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47486 Sep 22 23:22:24.930 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/05F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47487 Sep 22 23:22:24.930 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05F"
47488 Sep 22 23:22:24.930 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05F.db"
47489 Sep 22 23:22:24.930 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47490 Sep 22 23:22:24.930 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/05F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/05F.completed"
47491 Sep 22 23:22:24.930 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47492 Sep 22 23:22:24.930 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47493 Sep 22 23:22:24.930 DEBG [0] It's time to notify for 550
47494 Sep 22 23:22:24.931 INFO Completion from [0] id:550 status:true
47495 Sep 22 23:22:24.931 INFO [551/752] Repair commands completed
47496 Sep 22 23:22:24.931 INFO Pop front: ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }, state: ClientData([New, New, New]) }
47497 Sep 22 23:22:24.931 INFO Sent repair work, now wait for resp
47498 Sep 22 23:22:24.931 INFO [0] received reconcile message
47499 Sep 22 23:22:24.931 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }, state: ClientData([InProgress, New, New]) }, : downstairs
47500 Sep 22 23:22:24.931 INFO [0] client ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }
47501 Sep 22 23:22:24.931 INFO [1] received reconcile message
47502 Sep 22 23:22:24.931 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47503 Sep 22 23:22:24.931 INFO [1] client ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }
47504 Sep 22 23:22:24.931 INFO [2] received reconcile message
47505 Sep 22 23:22:24.931 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47506 Sep 22 23:22:24.931 INFO [2] client ExtentReopen { repair_id: ReconciliationId(551), extent_id: 95 }
47507 Sep 22 23:22:24.931 DEBG 551 Reopen extent 95
47508 Sep 22 23:22:24.932 DEBG 551 Reopen extent 95
47509 Sep 22 23:22:24.932 DEBG 551 Reopen extent 95
47510 Sep 22 23:22:24.933 DEBG [2] It's time to notify for 551
47511 Sep 22 23:22:24.933 INFO Completion from [2] id:551 status:true
47512 Sep 22 23:22:24.933 INFO [552/752] Repair commands completed
47513 Sep 22 23:22:24.933 INFO Pop front: ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47514 Sep 22 23:22:24.933 INFO Sent repair work, now wait for resp
47515 Sep 22 23:22:24.933 INFO [0] received reconcile message
47516 Sep 22 23:22:24.933 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47517 Sep 22 23:22:24.933 INFO [0] client ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47518 Sep 22 23:22:24.933 INFO [1] received reconcile message
47519 Sep 22 23:22:24.933 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47520 Sep 22 23:22:24.933 INFO [1] client ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47521 Sep 22 23:22:24.933 INFO [2] received reconcile message
47522 Sep 22 23:22:24.933 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47523 Sep 22 23:22:24.933 INFO [2] client ExtentFlush { repair_id: ReconciliationId(552), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47524 Sep 22 23:22:24.933 DEBG 552 Flush extent 154 with f:2 g:2
47525 Sep 22 23:22:24.933 DEBG Flush just extent 154 with f:2 and g:2
47526 Sep 22 23:22:24.933 DEBG [1] It's time to notify for 552
47527 Sep 22 23:22:24.933 INFO Completion from [1] id:552 status:true
47528 Sep 22 23:22:24.933 INFO [553/752] Repair commands completed
47529 Sep 22 23:22:24.933 INFO Pop front: ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }, state: ClientData([New, New, New]) }
47530 Sep 22 23:22:24.933 INFO Sent repair work, now wait for resp
47531 Sep 22 23:22:24.933 INFO [0] received reconcile message
47532 Sep 22 23:22:24.933 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }, state: ClientData([InProgress, New, New]) }, : downstairs
47533 Sep 22 23:22:24.933 INFO [0] client ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }
47534 Sep 22 23:22:24.933 INFO [1] received reconcile message
47535 Sep 22 23:22:24.933 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47536 Sep 22 23:22:24.934 INFO [1] client ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }
47537 Sep 22 23:22:24.934 INFO [2] received reconcile message
47538 Sep 22 23:22:24.934 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47539 Sep 22 23:22:24.934 INFO [2] client ExtentClose { repair_id: ReconciliationId(553), extent_id: 154 }
47540 Sep 22 23:22:24.934 DEBG 553 Close extent 154
47541 Sep 22 23:22:24.934 DEBG 553 Close extent 154
47542 Sep 22 23:22:24.934 DEBG 553 Close extent 154
47543 Sep 22 23:22:24.935 DEBG [2] It's time to notify for 553
47544 Sep 22 23:22:24.935 INFO Completion from [2] id:553 status:true
47545 Sep 22 23:22:24.935 INFO [554/752] Repair commands completed
47546 Sep 22 23:22:24.935 INFO Pop front: ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47547 Sep 22 23:22:24.935 INFO Sent repair work, now wait for resp
47548 Sep 22 23:22:24.935 INFO [0] received reconcile message
47549 Sep 22 23:22:24.935 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47550 Sep 22 23:22:24.935 INFO [0] client ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47551 Sep 22 23:22:24.935 INFO [0] Sending repair request ReconciliationId(554)
47552 Sep 22 23:22:24.935 INFO [1] received reconcile message
47553 Sep 22 23:22:24.935 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47554 Sep 22 23:22:24.935 INFO [1] client ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47555 Sep 22 23:22:24.935 INFO [1] No action required ReconciliationId(554)
47556 Sep 22 23:22:24.935 INFO [2] received reconcile message
47557 Sep 22 23:22:24.935 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47558 Sep 22 23:22:24.935 INFO [2] client ExtentRepair { repair_id: ReconciliationId(554), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47559 Sep 22 23:22:24.935 INFO [2] No action required ReconciliationId(554)
47560 Sep 22 23:22:24.935 DEBG 554 Repair extent 154 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47561 Sep 22 23:22:24.935 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/09A.copy"
47562 Sep 22 23:22:25.000 INFO accepted connection, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47563 Sep 22 23:22:25.000 TRCE incoming request, uri: /extent/154/files, method: GET, req_id: aaa5a4ba-3718-4df6-b666-a6c949c53f90, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47564 Sep 22 23:22:25.000 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/154/files, method: GET, req_id: aaa5a4ba-3718-4df6-b666-a6c949c53f90, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47565 Sep 22 23:22:25.000 INFO eid:154 Found repair files: ["09A", "09A.db"]
47566 Sep 22 23:22:25.001 TRCE incoming request, uri: /newextent/154/data, method: GET, req_id: 5ec35068-0643-4975-a63c-2c90066ee389, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47567 Sep 22 23:22:25.001 INFO request completed, latency_us: 341, response_code: 200, uri: /newextent/154/data, method: GET, req_id: 5ec35068-0643-4975-a63c-2c90066ee389, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47568 Sep 22 23:22:25.006 TRCE incoming request, uri: /newextent/154/db, method: GET, req_id: 185946b0-4500-433f-9c10-4306b64b054a, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47569 Sep 22 23:22:25.006 INFO request completed, latency_us: 311, response_code: 200, uri: /newextent/154/db, method: GET, req_id: 185946b0-4500-433f-9c10-4306b64b054a, remote_addr: 127.0.0.1:35361, local_addr: 127.0.0.1:52864, task: repair
47570 Sep 22 23:22:25.008 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/09A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/09A.replace"
47571 Sep 22 23:22:25.008 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47572 Sep 22 23:22:25.009 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/09A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47573 Sep 22 23:22:25.009 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09A"
47574 Sep 22 23:22:25.009 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09A.db"
47575 Sep 22 23:22:25.009 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47576 Sep 22 23:22:25.009 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/09A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/09A.completed"
47577 Sep 22 23:22:25.009 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47578 Sep 22 23:22:25.009 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47579 Sep 22 23:22:25.010 DEBG [0] It's time to notify for 554
47580 Sep 22 23:22:25.010 INFO Completion from [0] id:554 status:true
47581 Sep 22 23:22:25.010 INFO [555/752] Repair commands completed
47582 Sep 22 23:22:25.010 INFO Pop front: ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }, state: ClientData([New, New, New]) }
47583 Sep 22 23:22:25.010 INFO Sent repair work, now wait for resp
47584 Sep 22 23:22:25.010 INFO [0] received reconcile message
47585 Sep 22 23:22:25.010 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }, state: ClientData([InProgress, New, New]) }, : downstairs
47586 Sep 22 23:22:25.010 INFO [0] client ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }
47587 Sep 22 23:22:25.010 INFO [1] received reconcile message
47588 Sep 22 23:22:25.010 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47589 Sep 22 23:22:25.010 INFO [1] client ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }
47590 Sep 22 23:22:25.010 INFO [2] received reconcile message
47591 Sep 22 23:22:25.010 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47592 Sep 22 23:22:25.010 INFO [2] client ExtentReopen { repair_id: ReconciliationId(555), extent_id: 154 }
47593 Sep 22 23:22:25.010 DEBG 555 Reopen extent 154
47594 Sep 22 23:22:25.011 DEBG 555 Reopen extent 154
47595 Sep 22 23:22:25.012 DEBG 555 Reopen extent 154
47596 Sep 22 23:22:25.012 DEBG [2] It's time to notify for 555
47597 Sep 22 23:22:25.012 INFO Completion from [2] id:555 status:true
47598 Sep 22 23:22:25.012 INFO [556/752] Repair commands completed
47599 Sep 22 23:22:25.012 INFO Pop front: ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47600 Sep 22 23:22:25.012 INFO Sent repair work, now wait for resp
47601 Sep 22 23:22:25.012 INFO [0] received reconcile message
47602 Sep 22 23:22:25.012 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47603 Sep 22 23:22:25.012 INFO [0] client ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47604 Sep 22 23:22:25.013 INFO [1] received reconcile message
47605 Sep 22 23:22:25.013 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47606 Sep 22 23:22:25.013 INFO [1] client ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47607 Sep 22 23:22:25.013 INFO [2] received reconcile message
47608 Sep 22 23:22:25.013 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47609 Sep 22 23:22:25.013 INFO [2] client ExtentFlush { repair_id: ReconciliationId(556), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47610 Sep 22 23:22:25.013 DEBG 556 Flush extent 50 with f:2 g:2
47611 Sep 22 23:22:25.013 DEBG Flush just extent 50 with f:2 and g:2
47612 Sep 22 23:22:25.013 DEBG [1] It's time to notify for 556
47613 Sep 22 23:22:25.013 INFO Completion from [1] id:556 status:true
47614 Sep 22 23:22:25.013 INFO [557/752] Repair commands completed
47615 Sep 22 23:22:25.013 INFO Pop front: ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }, state: ClientData([New, New, New]) }
47616 Sep 22 23:22:25.013 INFO Sent repair work, now wait for resp
47617 Sep 22 23:22:25.013 INFO [0] received reconcile message
47618 Sep 22 23:22:25.013 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }, state: ClientData([InProgress, New, New]) }, : downstairs
47619 Sep 22 23:22:25.013 INFO [0] client ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }
47620 Sep 22 23:22:25.013 INFO [1] received reconcile message
47621 Sep 22 23:22:25.013 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47622 Sep 22 23:22:25.013 INFO [1] client ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }
47623 Sep 22 23:22:25.013 INFO [2] received reconcile message
47624 Sep 22 23:22:25.013 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47625 Sep 22 23:22:25.013 INFO [2] client ExtentClose { repair_id: ReconciliationId(557), extent_id: 50 }
47626 Sep 22 23:22:25.013 DEBG 557 Close extent 50
47627 Sep 22 23:22:25.014 DEBG 557 Close extent 50
47628 Sep 22 23:22:25.014 DEBG 557 Close extent 50
47629 Sep 22 23:22:25.014 DEBG [2] It's time to notify for 557
47630 Sep 22 23:22:25.014 INFO Completion from [2] id:557 status:true
47631 Sep 22 23:22:25.014 INFO [558/752] Repair commands completed
47632 Sep 22 23:22:25.014 INFO Pop front: ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47633 Sep 22 23:22:25.014 INFO Sent repair work, now wait for resp
47634 Sep 22 23:22:25.014 INFO [0] received reconcile message
47635 Sep 22 23:22:25.014 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47636 Sep 22 23:22:25.014 INFO [0] client ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47637 Sep 22 23:22:25.015 INFO [0] Sending repair request ReconciliationId(558)
47638 Sep 22 23:22:25.015 INFO [1] received reconcile message
47639 Sep 22 23:22:25.015 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47640 Sep 22 23:22:25.015 INFO [1] client ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47641 Sep 22 23:22:25.015 INFO [1] No action required ReconciliationId(558)
47642 Sep 22 23:22:25.015 INFO [2] received reconcile message
47643 Sep 22 23:22:25.015 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47644 Sep 22 23:22:25.015 INFO [2] client ExtentRepair { repair_id: ReconciliationId(558), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47645 Sep 22 23:22:25.015 INFO [2] No action required ReconciliationId(558)
47646 Sep 22 23:22:25.015 DEBG 558 Repair extent 50 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47647 Sep 22 23:22:25.015 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/032.copy"
47648 Sep 22 23:22:25.079 INFO accepted connection, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47649 Sep 22 23:22:25.079 TRCE incoming request, uri: /extent/50/files, method: GET, req_id: 01233bc9-73b0-4caf-b5e0-32cdf2a6182b, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47650 Sep 22 23:22:25.079 INFO request completed, latency_us: 210, response_code: 200, uri: /extent/50/files, method: GET, req_id: 01233bc9-73b0-4caf-b5e0-32cdf2a6182b, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47651 Sep 22 23:22:25.080 INFO eid:50 Found repair files: ["032", "032.db"]
47652 Sep 22 23:22:25.080 TRCE incoming request, uri: /newextent/50/data, method: GET, req_id: f73f8b44-9df8-4613-a0ae-a0a37cd3010b, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47653 Sep 22 23:22:25.080 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/50/data, method: GET, req_id: f73f8b44-9df8-4613-a0ae-a0a37cd3010b, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47654 Sep 22 23:22:25.085 TRCE incoming request, uri: /newextent/50/db, method: GET, req_id: dc1568cc-ee09-42d0-a67f-32e941131f78, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47655 Sep 22 23:22:25.085 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/50/db, method: GET, req_id: dc1568cc-ee09-42d0-a67f-32e941131f78, remote_addr: 127.0.0.1:43407, local_addr: 127.0.0.1:52864, task: repair
47656 Sep 22 23:22:25.086 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/032.copy" to "/tmp/downstairs-zrMnlo6G/00/000/032.replace"
47657 Sep 22 23:22:25.086 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47658 Sep 22 23:22:25.087 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/032.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47659 Sep 22 23:22:25.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/032"
47660 Sep 22 23:22:25.087 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/032.db"
47661 Sep 22 23:22:25.088 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47662 Sep 22 23:22:25.088 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/032.replace" to "/tmp/downstairs-zrMnlo6G/00/000/032.completed"
47663 Sep 22 23:22:25.088 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47664 Sep 22 23:22:25.088 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47665 Sep 22 23:22:25.088 DEBG [0] It's time to notify for 558
47666 Sep 22 23:22:25.088 INFO Completion from [0] id:558 status:true
47667 Sep 22 23:22:25.088 INFO [559/752] Repair commands completed
47668 Sep 22 23:22:25.088 INFO Pop front: ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }, state: ClientData([New, New, New]) }
47669 Sep 22 23:22:25.088 INFO Sent repair work, now wait for resp
47670 Sep 22 23:22:25.088 INFO [0] received reconcile message
47671 Sep 22 23:22:25.088 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }, state: ClientData([InProgress, New, New]) }, : downstairs
47672 Sep 22 23:22:25.088 INFO [0] client ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }
47673 Sep 22 23:22:25.088 INFO [1] received reconcile message
47674 Sep 22 23:22:25.088 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47675 Sep 22 23:22:25.088 INFO [1] client ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }
47676 Sep 22 23:22:25.088 INFO [2] received reconcile message
47677 Sep 22 23:22:25.088 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47678 Sep 22 23:22:25.088 INFO [2] client ExtentReopen { repair_id: ReconciliationId(559), extent_id: 50 }
47679 Sep 22 23:22:25.088 DEBG 559 Reopen extent 50
47680 Sep 22 23:22:25.089 DEBG 559 Reopen extent 50
47681 Sep 22 23:22:25.089 DEBG 559 Reopen extent 50
47682 Sep 22 23:22:25.090 DEBG [2] It's time to notify for 559
47683 Sep 22 23:22:25.090 INFO Completion from [2] id:559 status:true
47684 Sep 22 23:22:25.090 INFO [560/752] Repair commands completed
47685 Sep 22 23:22:25.090 INFO Pop front: ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47686 Sep 22 23:22:25.090 INFO Sent repair work, now wait for resp
47687 Sep 22 23:22:25.090 INFO [0] received reconcile message
47688 Sep 22 23:22:25.090 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47689 Sep 22 23:22:25.090 INFO [0] client ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47690 Sep 22 23:22:25.090 INFO [1] received reconcile message
47691 Sep 22 23:22:25.090 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47692 Sep 22 23:22:25.090 INFO [1] client ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47693 Sep 22 23:22:25.090 INFO [2] received reconcile message
47694 Sep 22 23:22:25.090 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47695 Sep 22 23:22:25.091 INFO [2] client ExtentFlush { repair_id: ReconciliationId(560), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47696 Sep 22 23:22:25.091 DEBG 560 Flush extent 91 with f:2 g:2
47697 Sep 22 23:22:25.091 DEBG Flush just extent 91 with f:2 and g:2
47698 Sep 22 23:22:25.091 DEBG [1] It's time to notify for 560
47699 Sep 22 23:22:25.091 INFO Completion from [1] id:560 status:true
47700 Sep 22 23:22:25.091 INFO [561/752] Repair commands completed
47701 Sep 22 23:22:25.091 INFO Pop front: ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }, state: ClientData([New, New, New]) }
47702 Sep 22 23:22:25.091 INFO Sent repair work, now wait for resp
47703 Sep 22 23:22:25.091 INFO [0] received reconcile message
47704 Sep 22 23:22:25.091 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }, state: ClientData([InProgress, New, New]) }, : downstairs
47705 Sep 22 23:22:25.091 INFO [0] client ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }
47706 Sep 22 23:22:25.091 INFO [1] received reconcile message
47707 Sep 22 23:22:25.091 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47708 Sep 22 23:22:25.091 INFO [1] client ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }
47709 Sep 22 23:22:25.091 INFO [2] received reconcile message
47710 Sep 22 23:22:25.091 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47711 Sep 22 23:22:25.091 INFO [2] client ExtentClose { repair_id: ReconciliationId(561), extent_id: 91 }
47712 Sep 22 23:22:25.091 DEBG 561 Close extent 91
47713 Sep 22 23:22:25.091 DEBG 561 Close extent 91
47714 Sep 22 23:22:25.092 DEBG 561 Close extent 91
47715 Sep 22 23:22:25.092 DEBG [2] It's time to notify for 561
47716 Sep 22 23:22:25.092 INFO Completion from [2] id:561 status:true
47717 Sep 22 23:22:25.092 INFO [562/752] Repair commands completed
47718 Sep 22 23:22:25.092 INFO Pop front: ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47719 Sep 22 23:22:25.092 INFO Sent repair work, now wait for resp
47720 Sep 22 23:22:25.092 INFO [0] received reconcile message
47721 Sep 22 23:22:25.092 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47722 Sep 22 23:22:25.092 INFO [0] client ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47723 Sep 22 23:22:25.092 INFO [0] Sending repair request ReconciliationId(562)
47724 Sep 22 23:22:25.092 INFO [1] received reconcile message
47725 Sep 22 23:22:25.092 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47726 Sep 22 23:22:25.092 INFO [1] client ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47727 Sep 22 23:22:25.093 INFO [1] No action required ReconciliationId(562)
47728 Sep 22 23:22:25.093 INFO [2] received reconcile message
47729 Sep 22 23:22:25.093 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47730 Sep 22 23:22:25.093 INFO [2] client ExtentRepair { repair_id: ReconciliationId(562), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47731 Sep 22 23:22:25.093 INFO [2] No action required ReconciliationId(562)
47732 Sep 22 23:22:25.093 DEBG 562 Repair extent 91 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47733 Sep 22 23:22:25.093 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/05B.copy"
47734 Sep 22 23:22:25.116 DEBG [1] Read AckReady 1089, : downstairs
47735 Sep 22 23:22:25.117 ERRO [1] job id 1090 saw error GenericError("test error")
47736 Sep 22 23:22:25.118 DEBG up_ds_listen was notified
47737 Sep 22 23:22:25.118 DEBG up_ds_listen process 1089
47738 Sep 22 23:22:25.118 DEBG [A] ack job 1089:90, : downstairs
47739 Sep 22 23:22:25.158 INFO accepted connection, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47740 Sep 22 23:22:25.159 TRCE incoming request, uri: /extent/91/files, method: GET, req_id: b382f27e-dea6-46a6-a6f2-9aefcceb9446, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47741 Sep 22 23:22:25.159 INFO request completed, latency_us: 204, response_code: 200, uri: /extent/91/files, method: GET, req_id: b382f27e-dea6-46a6-a6f2-9aefcceb9446, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47742 Sep 22 23:22:25.159 INFO eid:91 Found repair files: ["05B", "05B.db"]
47743 Sep 22 23:22:25.160 TRCE incoming request, uri: /newextent/91/data, method: GET, req_id: a4131c46-b608-4804-b761-932fc0505706, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47744 Sep 22 23:22:25.160 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/91/data, method: GET, req_id: a4131c46-b608-4804-b761-932fc0505706, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47745 Sep 22 23:22:25.165 TRCE incoming request, uri: /newextent/91/db, method: GET, req_id: b679f97a-5bc0-434a-bd1b-ca948795f656, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47746 Sep 22 23:22:25.165 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/91/db, method: GET, req_id: b679f97a-5bc0-434a-bd1b-ca948795f656, remote_addr: 127.0.0.1:55866, local_addr: 127.0.0.1:52864, task: repair
47747 Sep 22 23:22:25.166 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/05B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/05B.replace"
47748 Sep 22 23:22:25.166 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47749 Sep 22 23:22:25.167 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/05B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47750 Sep 22 23:22:25.167 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05B"
47751 Sep 22 23:22:25.167 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/05B.db"
47752 Sep 22 23:22:25.167 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47753 Sep 22 23:22:25.167 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/05B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/05B.completed"
47754 Sep 22 23:22:25.167 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47755 Sep 22 23:22:25.167 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47756 Sep 22 23:22:25.168 DEBG [0] It's time to notify for 562
47757 Sep 22 23:22:25.168 INFO Completion from [0] id:562 status:true
47758 Sep 22 23:22:25.168 INFO [563/752] Repair commands completed
47759 Sep 22 23:22:25.168 INFO Pop front: ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }, state: ClientData([New, New, New]) }
47760 Sep 22 23:22:25.168 INFO Sent repair work, now wait for resp
47761 Sep 22 23:22:25.168 INFO [0] received reconcile message
47762 Sep 22 23:22:25.168 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }, state: ClientData([InProgress, New, New]) }, : downstairs
47763 Sep 22 23:22:25.168 INFO [0] client ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }
47764 Sep 22 23:22:25.168 INFO [1] received reconcile message
47765 Sep 22 23:22:25.168 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47766 Sep 22 23:22:25.168 INFO [1] client ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }
47767 Sep 22 23:22:25.168 INFO [2] received reconcile message
47768 Sep 22 23:22:25.168 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47769 Sep 22 23:22:25.168 INFO [2] client ExtentReopen { repair_id: ReconciliationId(563), extent_id: 91 }
47770 Sep 22 23:22:25.168 DEBG 563 Reopen extent 91
47771 Sep 22 23:22:25.169 DEBG 563 Reopen extent 91
47772 Sep 22 23:22:25.169 DEBG 563 Reopen extent 91
47773 Sep 22 23:22:25.170 DEBG [2] It's time to notify for 563
47774 Sep 22 23:22:25.170 INFO Completion from [2] id:563 status:true
47775 Sep 22 23:22:25.170 INFO [564/752] Repair commands completed
47776 Sep 22 23:22:25.170 INFO Pop front: ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47777 Sep 22 23:22:25.170 INFO Sent repair work, now wait for resp
47778 Sep 22 23:22:25.170 INFO [0] received reconcile message
47779 Sep 22 23:22:25.170 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47780 Sep 22 23:22:25.170 INFO [0] client ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47781 Sep 22 23:22:25.170 INFO [1] received reconcile message
47782 Sep 22 23:22:25.170 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47783 Sep 22 23:22:25.170 INFO [1] client ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47784 Sep 22 23:22:25.170 INFO [2] received reconcile message
47785 Sep 22 23:22:25.170 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47786 Sep 22 23:22:25.170 INFO [2] client ExtentFlush { repair_id: ReconciliationId(564), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47787 Sep 22 23:22:25.170 DEBG 564 Flush extent 158 with f:2 g:2
47788 Sep 22 23:22:25.170 DEBG Flush just extent 158 with f:2 and g:2
47789 Sep 22 23:22:25.170 DEBG [1] It's time to notify for 564
47790 Sep 22 23:22:25.170 INFO Completion from [1] id:564 status:true
47791 Sep 22 23:22:25.170 INFO [565/752] Repair commands completed
47792 Sep 22 23:22:25.171 INFO Pop front: ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }, state: ClientData([New, New, New]) }
47793 Sep 22 23:22:25.171 INFO Sent repair work, now wait for resp
47794 Sep 22 23:22:25.171 INFO [0] received reconcile message
47795 Sep 22 23:22:25.171 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }, state: ClientData([InProgress, New, New]) }, : downstairs
47796 Sep 22 23:22:25.171 INFO [0] client ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }
47797 Sep 22 23:22:25.171 INFO [1] received reconcile message
47798 Sep 22 23:22:25.171 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47799 Sep 22 23:22:25.171 INFO [1] client ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }
47800 Sep 22 23:22:25.171 INFO [2] received reconcile message
47801 Sep 22 23:22:25.171 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47802 Sep 22 23:22:25.171 INFO [2] client ExtentClose { repair_id: ReconciliationId(565), extent_id: 158 }
47803 Sep 22 23:22:25.171 DEBG up_ds_listen checked 1 jobs, back to waiting
47804 Sep 22 23:22:25.171 DEBG 565 Close extent 158
47805 Sep 22 23:22:25.171 DEBG 565 Close extent 158
47806 Sep 22 23:22:25.171 DEBG 565 Close extent 158
47807 Sep 22 23:22:25.172 DEBG [2] It's time to notify for 565
47808 Sep 22 23:22:25.172 INFO Completion from [2] id:565 status:true
47809 Sep 22 23:22:25.172 INFO [566/752] Repair commands completed
47810 Sep 22 23:22:25.172 INFO Pop front: ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47811 Sep 22 23:22:25.172 INFO Sent repair work, now wait for resp
47812 Sep 22 23:22:25.172 INFO [0] received reconcile message
47813 Sep 22 23:22:25.172 WARN returning error on read!
47814 Sep 22 23:22:25.172 DEBG Read :1089 deps:[JobId(1088)] res:false
47815 Sep 22 23:22:25.172 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47816 Sep 22 23:22:25.172 INFO [0] client ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47817 Sep 22 23:22:25.172 INFO [0] Sending repair request ReconciliationId(566)
47818 Sep 22 23:22:25.172 INFO [1] received reconcile message
47819 Sep 22 23:22:25.172 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47820 Sep 22 23:22:25.172 INFO [1] client ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47821 Sep 22 23:22:25.172 INFO [1] No action required ReconciliationId(566)
47822 Sep 22 23:22:25.172 INFO [2] received reconcile message
47823 Sep 22 23:22:25.172 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47824 Sep 22 23:22:25.172 INFO [2] client ExtentRepair { repair_id: ReconciliationId(566), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47825 Sep 22 23:22:25.172 INFO [2] No action required ReconciliationId(566)
47826 Sep 22 23:22:25.172 DEBG 566 Repair extent 158 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47827 Sep 22 23:22:25.172 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/09E.copy"
47828 Sep 22 23:22:25.178 DEBG Read :1089 deps:[JobId(1088)] res:true
47829 Sep 22 23:22:25.199 DEBG IO Read 1091 has deps [JobId(1090)]
47830 Sep 22 23:22:25.200 ERRO [0] job id 1089 saw error GenericError("test error")
47831 Sep 22 23:22:25.213 DEBG Flush :1090 extent_limit None deps:[JobId(1089), JobId(1088)] res:true f:34 g:1
47832 Sep 22 23:22:25.219 DEBG Read :1091 deps:[JobId(1090)] res:true
47833 Sep 22 23:22:25.238 INFO accepted connection, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47834 Sep 22 23:22:25.238 TRCE incoming request, uri: /extent/158/files, method: GET, req_id: 7d4fce06-12bc-4138-ae19-3297800b269d, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47835 Sep 22 23:22:25.238 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/158/files, method: GET, req_id: 7d4fce06-12bc-4138-ae19-3297800b269d, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47836 Sep 22 23:22:25.239 INFO eid:158 Found repair files: ["09E", "09E.db"]
47837 Sep 22 23:22:25.239 TRCE incoming request, uri: /newextent/158/data, method: GET, req_id: 94d36839-fc84-4d10-8dda-d113b8c43bb0, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47838 Sep 22 23:22:25.239 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/158/data, method: GET, req_id: 94d36839-fc84-4d10-8dda-d113b8c43bb0, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47839 Sep 22 23:22:25.244 TRCE incoming request, uri: /newextent/158/db, method: GET, req_id: 9c1e0009-c6b4-4c16-b8e8-0a61bc1848a2, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47840 Sep 22 23:22:25.244 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/158/db, method: GET, req_id: 9c1e0009-c6b4-4c16-b8e8-0a61bc1848a2, remote_addr: 127.0.0.1:44139, local_addr: 127.0.0.1:52864, task: repair
47841 Sep 22 23:22:25.245 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/09E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/09E.replace"
47842 Sep 22 23:22:25.245 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47843 Sep 22 23:22:25.246 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/09E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47844 Sep 22 23:22:25.246 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09E"
47845 Sep 22 23:22:25.246 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09E.db"
47846 Sep 22 23:22:25.246 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47847 Sep 22 23:22:25.246 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/09E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/09E.completed"
47848 Sep 22 23:22:25.246 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47849 Sep 22 23:22:25.247 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47850 Sep 22 23:22:25.247 DEBG [0] It's time to notify for 566
47851 Sep 22 23:22:25.247 INFO Completion from [0] id:566 status:true
47852 Sep 22 23:22:25.247 INFO [567/752] Repair commands completed
47853 Sep 22 23:22:25.247 INFO Pop front: ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }, state: ClientData([New, New, New]) }
47854 Sep 22 23:22:25.247 INFO Sent repair work, now wait for resp
47855 Sep 22 23:22:25.247 INFO [0] received reconcile message
47856 Sep 22 23:22:25.247 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }, state: ClientData([InProgress, New, New]) }, : downstairs
47857 Sep 22 23:22:25.247 INFO [0] client ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }
47858 Sep 22 23:22:25.247 INFO [1] received reconcile message
47859 Sep 22 23:22:25.247 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47860 Sep 22 23:22:25.247 INFO [1] client ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }
47861 Sep 22 23:22:25.247 INFO [2] received reconcile message
47862 Sep 22 23:22:25.247 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47863 Sep 22 23:22:25.247 INFO [2] client ExtentReopen { repair_id: ReconciliationId(567), extent_id: 158 }
47864 Sep 22 23:22:25.247 DEBG 567 Reopen extent 158
47865 Sep 22 23:22:25.248 DEBG 567 Reopen extent 158
47866 Sep 22 23:22:25.248 DEBG 567 Reopen extent 158
47867 Sep 22 23:22:25.249 DEBG [2] It's time to notify for 567
47868 Sep 22 23:22:25.249 INFO Completion from [2] id:567 status:true
47869 Sep 22 23:22:25.249 INFO [568/752] Repair commands completed
47870 Sep 22 23:22:25.249 INFO Pop front: ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47871 Sep 22 23:22:25.249 INFO Sent repair work, now wait for resp
47872 Sep 22 23:22:25.249 INFO [0] received reconcile message
47873 Sep 22 23:22:25.249 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47874 Sep 22 23:22:25.249 INFO [0] client ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47875 Sep 22 23:22:25.249 INFO [1] received reconcile message
47876 Sep 22 23:22:25.249 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47877 Sep 22 23:22:25.249 INFO [1] client ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47878 Sep 22 23:22:25.249 INFO [2] received reconcile message
47879 Sep 22 23:22:25.249 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47880 Sep 22 23:22:25.249 INFO [2] client ExtentFlush { repair_id: ReconciliationId(568), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47881 Sep 22 23:22:25.250 DEBG 568 Flush extent 177 with f:2 g:2
47882 Sep 22 23:22:25.250 DEBG Flush just extent 177 with f:2 and g:2
47883 Sep 22 23:22:25.250 DEBG [1] It's time to notify for 568
47884 Sep 22 23:22:25.250 INFO Completion from [1] id:568 status:true
47885 Sep 22 23:22:25.250 INFO [569/752] Repair commands completed
47886 Sep 22 23:22:25.250 INFO Pop front: ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }, state: ClientData([New, New, New]) }
47887 Sep 22 23:22:25.250 INFO Sent repair work, now wait for resp
47888 Sep 22 23:22:25.250 INFO [0] received reconcile message
47889 Sep 22 23:22:25.250 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }, state: ClientData([InProgress, New, New]) }, : downstairs
47890 Sep 22 23:22:25.250 INFO [0] client ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }
47891 Sep 22 23:22:25.250 INFO [1] received reconcile message
47892 Sep 22 23:22:25.250 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47893 Sep 22 23:22:25.250 INFO [1] client ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }
47894 Sep 22 23:22:25.250 INFO [2] received reconcile message
47895 Sep 22 23:22:25.250 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47896 Sep 22 23:22:25.250 INFO [2] client ExtentClose { repair_id: ReconciliationId(569), extent_id: 177 }
47897 Sep 22 23:22:25.250 DEBG 569 Close extent 177
47898 Sep 22 23:22:25.250 DEBG 569 Close extent 177
47899 Sep 22 23:22:25.251 DEBG 569 Close extent 177
47900 Sep 22 23:22:25.251 DEBG [2] It's time to notify for 569
47901 Sep 22 23:22:25.251 INFO Completion from [2] id:569 status:true
47902 Sep 22 23:22:25.251 INFO [570/752] Repair commands completed
47903 Sep 22 23:22:25.251 INFO Pop front: ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47904 Sep 22 23:22:25.251 INFO Sent repair work, now wait for resp
47905 Sep 22 23:22:25.251 INFO [0] received reconcile message
47906 Sep 22 23:22:25.251 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47907 Sep 22 23:22:25.251 INFO [0] client ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47908 Sep 22 23:22:25.251 INFO [0] Sending repair request ReconciliationId(570)
47909 Sep 22 23:22:25.251 INFO [1] received reconcile message
47910 Sep 22 23:22:25.251 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47911 Sep 22 23:22:25.251 INFO [1] client ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47912 Sep 22 23:22:25.251 INFO [1] No action required ReconciliationId(570)
47913 Sep 22 23:22:25.251 INFO [2] received reconcile message
47914 Sep 22 23:22:25.251 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47915 Sep 22 23:22:25.251 INFO [2] client ExtentRepair { repair_id: ReconciliationId(570), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47916 Sep 22 23:22:25.252 INFO [2] No action required ReconciliationId(570)
47917 Sep 22 23:22:25.252 DEBG 570 Repair extent 177 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
47918 Sep 22 23:22:25.252 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B1.copy"
47919 Sep 22 23:22:25.316 INFO accepted connection, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47920 Sep 22 23:22:25.317 TRCE incoming request, uri: /extent/177/files, method: GET, req_id: a71e632f-2612-4078-915c-6d53daa1c12f, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47921 Sep 22 23:22:25.317 INFO request completed, latency_us: 215, response_code: 200, uri: /extent/177/files, method: GET, req_id: a71e632f-2612-4078-915c-6d53daa1c12f, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47922 Sep 22 23:22:25.317 INFO eid:177 Found repair files: ["0B1", "0B1.db"]
47923 Sep 22 23:22:25.317 TRCE incoming request, uri: /newextent/177/data, method: GET, req_id: 51b99753-2242-44c4-b420-e3ec4a8fcc8a, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47924 Sep 22 23:22:25.318 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/177/data, method: GET, req_id: 51b99753-2242-44c4-b420-e3ec4a8fcc8a, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47925 Sep 22 23:22:25.323 TRCE incoming request, uri: /newextent/177/db, method: GET, req_id: f1a0743b-5c53-40d5-9a2d-49d7d18e5627, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47926 Sep 22 23:22:25.323 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/177/db, method: GET, req_id: f1a0743b-5c53-40d5-9a2d-49d7d18e5627, remote_addr: 127.0.0.1:62403, local_addr: 127.0.0.1:52864, task: repair
47927 Sep 22 23:22:25.324 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B1.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B1.replace"
47928 Sep 22 23:22:25.324 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47929 Sep 22 23:22:25.325 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B1.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
47930 Sep 22 23:22:25.325 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B1"
47931 Sep 22 23:22:25.325 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B1.db"
47932 Sep 22 23:22:25.325 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47933 Sep 22 23:22:25.325 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B1.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B1.completed"
47934 Sep 22 23:22:25.325 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47935 Sep 22 23:22:25.325 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
47936 Sep 22 23:22:25.326 DEBG [0] It's time to notify for 570
47937 Sep 22 23:22:25.326 INFO Completion from [0] id:570 status:true
47938 Sep 22 23:22:25.326 INFO [571/752] Repair commands completed
47939 Sep 22 23:22:25.326 INFO Pop front: ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }, state: ClientData([New, New, New]) }
47940 Sep 22 23:22:25.326 INFO Sent repair work, now wait for resp
47941 Sep 22 23:22:25.326 INFO [0] received reconcile message
47942 Sep 22 23:22:25.326 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }, state: ClientData([InProgress, New, New]) }, : downstairs
47943 Sep 22 23:22:25.326 INFO [0] client ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }
47944 Sep 22 23:22:25.326 INFO [1] received reconcile message
47945 Sep 22 23:22:25.326 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47946 Sep 22 23:22:25.326 INFO [1] client ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }
47947 Sep 22 23:22:25.326 INFO [2] received reconcile message
47948 Sep 22 23:22:25.326 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47949 Sep 22 23:22:25.326 INFO [2] client ExtentReopen { repair_id: ReconciliationId(571), extent_id: 177 }
47950 Sep 22 23:22:25.326 DEBG 571 Reopen extent 177
47951 Sep 22 23:22:25.327 DEBG 571 Reopen extent 177
47952 Sep 22 23:22:25.327 DEBG 571 Reopen extent 177
47953 Sep 22 23:22:25.328 DEBG [2] It's time to notify for 571
47954 Sep 22 23:22:25.328 INFO Completion from [2] id:571 status:true
47955 Sep 22 23:22:25.328 INFO [572/752] Repair commands completed
47956 Sep 22 23:22:25.328 INFO Pop front: ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47957 Sep 22 23:22:25.328 INFO Sent repair work, now wait for resp
47958 Sep 22 23:22:25.328 INFO [0] received reconcile message
47959 Sep 22 23:22:25.328 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47960 Sep 22 23:22:25.328 INFO [0] client ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47961 Sep 22 23:22:25.328 INFO [1] received reconcile message
47962 Sep 22 23:22:25.328 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47963 Sep 22 23:22:25.328 INFO [1] client ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47964 Sep 22 23:22:25.328 INFO [2] received reconcile message
47965 Sep 22 23:22:25.328 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47966 Sep 22 23:22:25.328 INFO [2] client ExtentFlush { repair_id: ReconciliationId(572), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47967 Sep 22 23:22:25.328 DEBG 572 Flush extent 134 with f:2 g:2
47968 Sep 22 23:22:25.328 DEBG Flush just extent 134 with f:2 and g:2
47969 Sep 22 23:22:25.328 DEBG [1] It's time to notify for 572
47970 Sep 22 23:22:25.328 INFO Completion from [1] id:572 status:true
47971 Sep 22 23:22:25.329 INFO [573/752] Repair commands completed
47972 Sep 22 23:22:25.329 INFO Pop front: ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }, state: ClientData([New, New, New]) }
47973 Sep 22 23:22:25.329 INFO Sent repair work, now wait for resp
47974 Sep 22 23:22:25.329 INFO [0] received reconcile message
47975 Sep 22 23:22:25.329 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }, state: ClientData([InProgress, New, New]) }, : downstairs
47976 Sep 22 23:22:25.329 INFO [0] client ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }
47977 Sep 22 23:22:25.329 INFO [1] received reconcile message
47978 Sep 22 23:22:25.329 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47979 Sep 22 23:22:25.329 INFO [1] client ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }
47980 Sep 22 23:22:25.329 INFO [2] received reconcile message
47981 Sep 22 23:22:25.329 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47982 Sep 22 23:22:25.329 INFO [2] client ExtentClose { repair_id: ReconciliationId(573), extent_id: 134 }
47983 Sep 22 23:22:25.329 DEBG 573 Close extent 134
47984 Sep 22 23:22:25.329 DEBG 573 Close extent 134
47985 Sep 22 23:22:25.329 DEBG 573 Close extent 134
47986 Sep 22 23:22:25.330 DEBG [2] It's time to notify for 573
47987 Sep 22 23:22:25.330 INFO Completion from [2] id:573 status:true
47988 Sep 22 23:22:25.330 INFO [574/752] Repair commands completed
47989 Sep 22 23:22:25.330 INFO Pop front: ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47990 Sep 22 23:22:25.330 INFO Sent repair work, now wait for resp
47991 Sep 22 23:22:25.330 INFO [0] received reconcile message
47992 Sep 22 23:22:25.330 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47993 Sep 22 23:22:25.330 INFO [0] client ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47994 Sep 22 23:22:25.330 INFO [0] Sending repair request ReconciliationId(574)
47995 Sep 22 23:22:25.330 INFO [1] received reconcile message
47996 Sep 22 23:22:25.330 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47997 Sep 22 23:22:25.330 INFO [1] client ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
47998 Sep 22 23:22:25.330 INFO [1] No action required ReconciliationId(574)
47999 Sep 22 23:22:25.330 INFO [2] received reconcile message
48000 Sep 22 23:22:25.330 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48001 Sep 22 23:22:25.330 INFO [2] client ExtentRepair { repair_id: ReconciliationId(574), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48002 Sep 22 23:22:25.330 INFO [2] No action required ReconciliationId(574)
48003 Sep 22 23:22:25.330 DEBG 574 Repair extent 134 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48004 Sep 22 23:22:25.330 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/086.copy"
48005 Sep 22 23:22:25.395 INFO accepted connection, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48006 Sep 22 23:22:25.395 TRCE incoming request, uri: /extent/134/files, method: GET, req_id: d5f937a2-74d8-47da-a4da-a4f5dbb0a0f7, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48007 Sep 22 23:22:25.395 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/134/files, method: GET, req_id: d5f937a2-74d8-47da-a4da-a4f5dbb0a0f7, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48008 Sep 22 23:22:25.396 INFO eid:134 Found repair files: ["086", "086.db"]
48009 Sep 22 23:22:25.396 TRCE incoming request, uri: /newextent/134/data, method: GET, req_id: f1bf4896-b15a-486f-abf3-376e80d8463d, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48010 Sep 22 23:22:25.396 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/134/data, method: GET, req_id: f1bf4896-b15a-486f-abf3-376e80d8463d, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48011 Sep 22 23:22:25.401 TRCE incoming request, uri: /newextent/134/db, method: GET, req_id: e7b9a11e-fec2-4ec4-a46c-275d0a912cbe, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48012 Sep 22 23:22:25.401 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/134/db, method: GET, req_id: e7b9a11e-fec2-4ec4-a46c-275d0a912cbe, remote_addr: 127.0.0.1:53520, local_addr: 127.0.0.1:52864, task: repair
48013 Sep 22 23:22:25.402 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/086.copy" to "/tmp/downstairs-zrMnlo6G/00/000/086.replace"
48014 Sep 22 23:22:25.402 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48015 Sep 22 23:22:25.403 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/086.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48016 Sep 22 23:22:25.403 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/086"
48017 Sep 22 23:22:25.403 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/086.db"
48018 Sep 22 23:22:25.403 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48019 Sep 22 23:22:25.403 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/086.replace" to "/tmp/downstairs-zrMnlo6G/00/000/086.completed"
48020 Sep 22 23:22:25.403 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48021 Sep 22 23:22:25.404 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48022 Sep 22 23:22:25.404 DEBG [0] It's time to notify for 574
48023 Sep 22 23:22:25.404 INFO Completion from [0] id:574 status:true
48024 Sep 22 23:22:25.404 INFO [575/752] Repair commands completed
48025 Sep 22 23:22:25.404 INFO Pop front: ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }, state: ClientData([New, New, New]) }
48026 Sep 22 23:22:25.404 INFO Sent repair work, now wait for resp
48027 Sep 22 23:22:25.404 INFO [0] received reconcile message
48028 Sep 22 23:22:25.404 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }, state: ClientData([InProgress, New, New]) }, : downstairs
48029 Sep 22 23:22:25.404 INFO [0] client ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }
48030 Sep 22 23:22:25.404 INFO [1] received reconcile message
48031 Sep 22 23:22:25.404 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48032 Sep 22 23:22:25.404 INFO [1] client ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }
48033 Sep 22 23:22:25.404 INFO [2] received reconcile message
48034 Sep 22 23:22:25.404 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48035 Sep 22 23:22:25.404 INFO [2] client ExtentReopen { repair_id: ReconciliationId(575), extent_id: 134 }
48036 Sep 22 23:22:25.404 DEBG 575 Reopen extent 134
48037 Sep 22 23:22:25.405 DEBG 575 Reopen extent 134
48038 Sep 22 23:22:25.405 DEBG 575 Reopen extent 134
48039 Sep 22 23:22:25.406 DEBG [2] It's time to notify for 575
48040 Sep 22 23:22:25.406 INFO Completion from [2] id:575 status:true
48041 Sep 22 23:22:25.406 INFO [576/752] Repair commands completed
48042 Sep 22 23:22:25.406 INFO Pop front: ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48043 Sep 22 23:22:25.406 INFO Sent repair work, now wait for resp
48044 Sep 22 23:22:25.406 INFO [0] received reconcile message
48045 Sep 22 23:22:25.406 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48046 Sep 22 23:22:25.406 INFO [0] client ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48047 Sep 22 23:22:25.406 INFO [1] received reconcile message
48048 Sep 22 23:22:25.406 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48049 Sep 22 23:22:25.406 INFO [1] client ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48050 Sep 22 23:22:25.406 INFO [2] received reconcile message
48051 Sep 22 23:22:25.406 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48052 Sep 22 23:22:25.406 INFO [2] client ExtentFlush { repair_id: ReconciliationId(576), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48053 Sep 22 23:22:25.406 DEBG 576 Flush extent 69 with f:2 g:2
48054 Sep 22 23:22:25.406 DEBG Flush just extent 69 with f:2 and g:2
48055 Sep 22 23:22:25.407 DEBG [1] It's time to notify for 576
48056 Sep 22 23:22:25.407 INFO Completion from [1] id:576 status:true
48057 Sep 22 23:22:25.407 INFO [577/752] Repair commands completed
48058 Sep 22 23:22:25.407 INFO Pop front: ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }, state: ClientData([New, New, New]) }
48059 Sep 22 23:22:25.407 INFO Sent repair work, now wait for resp
48060 Sep 22 23:22:25.407 INFO [0] received reconcile message
48061 Sep 22 23:22:25.407 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }, state: ClientData([InProgress, New, New]) }, : downstairs
48062 Sep 22 23:22:25.407 INFO [0] client ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }
48063 Sep 22 23:22:25.407 INFO [1] received reconcile message
48064 Sep 22 23:22:25.407 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48065 Sep 22 23:22:25.407 INFO [1] client ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }
48066 Sep 22 23:22:25.407 INFO [2] received reconcile message
48067 Sep 22 23:22:25.407 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48068 Sep 22 23:22:25.407 INFO [2] client ExtentClose { repair_id: ReconciliationId(577), extent_id: 69 }
48069 Sep 22 23:22:25.407 DEBG 577 Close extent 69
48070 Sep 22 23:22:25.407 DEBG 577 Close extent 69
48071 Sep 22 23:22:25.408 DEBG 577 Close extent 69
48072 Sep 22 23:22:25.408 DEBG [2] It's time to notify for 577
48073 Sep 22 23:22:25.408 INFO Completion from [2] id:577 status:true
48074 Sep 22 23:22:25.408 INFO [578/752] Repair commands completed
48075 Sep 22 23:22:25.408 INFO Pop front: ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48076 Sep 22 23:22:25.408 INFO Sent repair work, now wait for resp
48077 Sep 22 23:22:25.408 INFO [0] received reconcile message
48078 Sep 22 23:22:25.408 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48079 Sep 22 23:22:25.408 INFO [0] client ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48080 Sep 22 23:22:25.408 INFO [0] Sending repair request ReconciliationId(578)
48081 Sep 22 23:22:25.408 INFO [1] received reconcile message
48082 Sep 22 23:22:25.408 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48083 Sep 22 23:22:25.408 INFO [1] client ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48084 Sep 22 23:22:25.408 INFO [1] No action required ReconciliationId(578)
48085 Sep 22 23:22:25.408 INFO [2] received reconcile message
48086 Sep 22 23:22:25.408 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48087 Sep 22 23:22:25.408 INFO [2] client ExtentRepair { repair_id: ReconciliationId(578), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48088 Sep 22 23:22:25.408 INFO [2] No action required ReconciliationId(578)
48089 Sep 22 23:22:25.409 DEBG 578 Repair extent 69 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48090 Sep 22 23:22:25.409 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/045.copy"
48091 Sep 22 23:22:25.475 INFO accepted connection, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48092 Sep 22 23:22:25.475 TRCE incoming request, uri: /extent/69/files, method: GET, req_id: 992447ea-399c-4a38-b230-8560abc1f62b, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48093 Sep 22 23:22:25.475 INFO request completed, latency_us: 268, response_code: 200, uri: /extent/69/files, method: GET, req_id: 992447ea-399c-4a38-b230-8560abc1f62b, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48094 Sep 22 23:22:25.476 INFO eid:69 Found repair files: ["045", "045.db"]
48095 Sep 22 23:22:25.476 TRCE incoming request, uri: /newextent/69/data, method: GET, req_id: 5e02e57a-58c4-4653-b377-d77e8d6acaa3, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48096 Sep 22 23:22:25.476 INFO request completed, latency_us: 345, response_code: 200, uri: /newextent/69/data, method: GET, req_id: 5e02e57a-58c4-4653-b377-d77e8d6acaa3, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48097 Sep 22 23:22:25.482 TRCE incoming request, uri: /newextent/69/db, method: GET, req_id: 7ff78441-283b-4fe3-9760-28a32ef94c13, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48098 Sep 22 23:22:25.482 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/69/db, method: GET, req_id: 7ff78441-283b-4fe3-9760-28a32ef94c13, remote_addr: 127.0.0.1:51898, local_addr: 127.0.0.1:52864, task: repair
48099 Sep 22 23:22:25.483 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/045.copy" to "/tmp/downstairs-zrMnlo6G/00/000/045.replace"
48100 Sep 22 23:22:25.483 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48101 Sep 22 23:22:25.484 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/045.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48102 Sep 22 23:22:25.484 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/045"
48103 Sep 22 23:22:25.485 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/045.db"
48104 Sep 22 23:22:25.485 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48105 Sep 22 23:22:25.485 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/045.replace" to "/tmp/downstairs-zrMnlo6G/00/000/045.completed"
48106 Sep 22 23:22:25.485 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48107 Sep 22 23:22:25.485 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48108 Sep 22 23:22:25.485 DEBG [0] It's time to notify for 578
48109 Sep 22 23:22:25.485 INFO Completion from [0] id:578 status:true
48110 Sep 22 23:22:25.485 INFO [579/752] Repair commands completed
48111 Sep 22 23:22:25.485 INFO Pop front: ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }, state: ClientData([New, New, New]) }
48112 Sep 22 23:22:25.485 INFO Sent repair work, now wait for resp
48113 Sep 22 23:22:25.485 INFO [0] received reconcile message
48114 Sep 22 23:22:25.485 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }, state: ClientData([InProgress, New, New]) }, : downstairs
48115 Sep 22 23:22:25.485 INFO [0] client ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }
48116 Sep 22 23:22:25.485 INFO [1] received reconcile message
48117 Sep 22 23:22:25.485 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48118 Sep 22 23:22:25.485 INFO [1] client ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }
48119 Sep 22 23:22:25.485 INFO [2] received reconcile message
48120 Sep 22 23:22:25.485 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48121 Sep 22 23:22:25.485 INFO [2] client ExtentReopen { repair_id: ReconciliationId(579), extent_id: 69 }
48122 Sep 22 23:22:25.486 DEBG 579 Reopen extent 69
48123 Sep 22 23:22:25.486 DEBG 579 Reopen extent 69
48124 Sep 22 23:22:25.487 DEBG 579 Reopen extent 69
48125 Sep 22 23:22:25.488 DEBG [2] It's time to notify for 579
48126 Sep 22 23:22:25.488 INFO Completion from [2] id:579 status:true
48127 Sep 22 23:22:25.488 INFO [580/752] Repair commands completed
48128 Sep 22 23:22:25.488 INFO Pop front: ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48129 Sep 22 23:22:25.488 INFO Sent repair work, now wait for resp
48130 Sep 22 23:22:25.488 INFO [0] received reconcile message
48131 Sep 22 23:22:25.488 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48132 Sep 22 23:22:25.488 INFO [0] client ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48133 Sep 22 23:22:25.488 INFO [1] received reconcile message
48134 Sep 22 23:22:25.488 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48135 Sep 22 23:22:25.488 INFO [1] client ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48136 Sep 22 23:22:25.488 INFO [2] received reconcile message
48137 Sep 22 23:22:25.488 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48138 Sep 22 23:22:25.488 INFO [2] client ExtentFlush { repair_id: ReconciliationId(580), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48139 Sep 22 23:22:25.488 DEBG 580 Flush extent 155 with f:2 g:2
48140 Sep 22 23:22:25.488 DEBG Flush just extent 155 with f:2 and g:2
48141 Sep 22 23:22:25.488 DEBG [1] It's time to notify for 580
48142 Sep 22 23:22:25.488 INFO Completion from [1] id:580 status:true
48143 Sep 22 23:22:25.488 INFO [581/752] Repair commands completed
48144 Sep 22 23:22:25.488 INFO Pop front: ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }, state: ClientData([New, New, New]) }
48145 Sep 22 23:22:25.488 INFO Sent repair work, now wait for resp
48146 Sep 22 23:22:25.488 INFO [0] received reconcile message
48147 Sep 22 23:22:25.488 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }, state: ClientData([InProgress, New, New]) }, : downstairs
48148 Sep 22 23:22:25.488 INFO [0] client ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }
48149 Sep 22 23:22:25.488 INFO [1] received reconcile message
48150 Sep 22 23:22:25.488 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48151 Sep 22 23:22:25.488 INFO [1] client ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }
48152 Sep 22 23:22:25.488 INFO [2] received reconcile message
48153 Sep 22 23:22:25.488 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48154 Sep 22 23:22:25.488 INFO [2] client ExtentClose { repair_id: ReconciliationId(581), extent_id: 155 }
48155 Sep 22 23:22:25.489 DEBG 581 Close extent 155
48156 Sep 22 23:22:25.489 DEBG 581 Close extent 155
48157 Sep 22 23:22:25.489 DEBG 581 Close extent 155
48158 Sep 22 23:22:25.490 DEBG [2] It's time to notify for 581
48159 Sep 22 23:22:25.490 INFO Completion from [2] id:581 status:true
48160 Sep 22 23:22:25.490 INFO [582/752] Repair commands completed
48161 Sep 22 23:22:25.490 INFO Pop front: ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48162 Sep 22 23:22:25.490 INFO Sent repair work, now wait for resp
48163 Sep 22 23:22:25.490 INFO [0] received reconcile message
48164 Sep 22 23:22:25.490 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48165 Sep 22 23:22:25.490 INFO [0] client ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48166 Sep 22 23:22:25.490 INFO [0] Sending repair request ReconciliationId(582)
48167 Sep 22 23:22:25.490 INFO [1] received reconcile message
48168 Sep 22 23:22:25.490 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48169 Sep 22 23:22:25.490 INFO [1] client ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48170 Sep 22 23:22:25.490 INFO [1] No action required ReconciliationId(582)
48171 Sep 22 23:22:25.490 INFO [2] received reconcile message
48172 Sep 22 23:22:25.490 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48173 Sep 22 23:22:25.490 INFO [2] client ExtentRepair { repair_id: ReconciliationId(582), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48174 Sep 22 23:22:25.490 INFO [2] No action required ReconciliationId(582)
48175 Sep 22 23:22:25.490 DEBG 582 Repair extent 155 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48176 Sep 22 23:22:25.490 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/09B.copy"
48177 Sep 22 23:22:25.553 INFO accepted connection, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48178 Sep 22 23:22:25.553 TRCE incoming request, uri: /extent/155/files, method: GET, req_id: 3116d4a7-4c54-43f2-bb3d-3143c4975672, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48179 Sep 22 23:22:25.553 INFO request completed, latency_us: 281, response_code: 200, uri: /extent/155/files, method: GET, req_id: 3116d4a7-4c54-43f2-bb3d-3143c4975672, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48180 Sep 22 23:22:25.554 INFO eid:155 Found repair files: ["09B", "09B.db"]
48181 Sep 22 23:22:25.554 TRCE incoming request, uri: /newextent/155/data, method: GET, req_id: 5657a3ec-3fc8-4c3f-b4a5-877673b97309, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48182 Sep 22 23:22:25.554 INFO request completed, latency_us: 376, response_code: 200, uri: /newextent/155/data, method: GET, req_id: 5657a3ec-3fc8-4c3f-b4a5-877673b97309, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48183 Sep 22 23:22:25.559 TRCE incoming request, uri: /newextent/155/db, method: GET, req_id: dd5a0f71-4f60-43d7-acb3-d05eabbf5b65, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48184 Sep 22 23:22:25.560 INFO request completed, latency_us: 309, response_code: 200, uri: /newextent/155/db, method: GET, req_id: dd5a0f71-4f60-43d7-acb3-d05eabbf5b65, remote_addr: 127.0.0.1:43168, local_addr: 127.0.0.1:52864, task: repair
48185 Sep 22 23:22:25.561 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/09B.copy" to "/tmp/downstairs-zrMnlo6G/00/000/09B.replace"
48186 Sep 22 23:22:25.561 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48187 Sep 22 23:22:25.562 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/09B.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48188 Sep 22 23:22:25.562 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09B"
48189 Sep 22 23:22:25.562 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/09B.db"
48190 Sep 22 23:22:25.563 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48191 Sep 22 23:22:25.563 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/09B.replace" to "/tmp/downstairs-zrMnlo6G/00/000/09B.completed"
48192 Sep 22 23:22:25.563 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48193 Sep 22 23:22:25.563 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48194 Sep 22 23:22:25.563 DEBG [0] It's time to notify for 582
48195 Sep 22 23:22:25.563 INFO Completion from [0] id:582 status:true
48196 Sep 22 23:22:25.563 INFO [583/752] Repair commands completed
48197 Sep 22 23:22:25.563 INFO Pop front: ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }, state: ClientData([New, New, New]) }
48198 Sep 22 23:22:25.563 INFO Sent repair work, now wait for resp
48199 Sep 22 23:22:25.563 INFO [0] received reconcile message
48200 Sep 22 23:22:25.563 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }, state: ClientData([InProgress, New, New]) }, : downstairs
48201 Sep 22 23:22:25.563 INFO [0] client ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }
48202 Sep 22 23:22:25.563 INFO [1] received reconcile message
48203 Sep 22 23:22:25.563 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48204 Sep 22 23:22:25.563 INFO [1] client ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }
48205 Sep 22 23:22:25.563 INFO [2] received reconcile message
48206 Sep 22 23:22:25.563 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48207 Sep 22 23:22:25.563 INFO [2] client ExtentReopen { repair_id: ReconciliationId(583), extent_id: 155 }
48208 Sep 22 23:22:25.564 DEBG 583 Reopen extent 155
48209 Sep 22 23:22:25.564 DEBG 583 Reopen extent 155
48210 Sep 22 23:22:25.565 DEBG 583 Reopen extent 155
48211 Sep 22 23:22:25.565 DEBG [2] It's time to notify for 583
48212 Sep 22 23:22:25.566 INFO Completion from [2] id:583 status:true
48213 Sep 22 23:22:25.566 INFO [584/752] Repair commands completed
48214 Sep 22 23:22:25.566 INFO Pop front: ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48215 Sep 22 23:22:25.566 INFO Sent repair work, now wait for resp
48216 Sep 22 23:22:25.566 INFO [0] received reconcile message
48217 Sep 22 23:22:25.566 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48218 Sep 22 23:22:25.566 INFO [0] client ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48219 Sep 22 23:22:25.566 INFO [1] received reconcile message
48220 Sep 22 23:22:25.566 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48221 Sep 22 23:22:25.566 INFO [1] client ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48222 Sep 22 23:22:25.566 INFO [2] received reconcile message
48223 Sep 22 23:22:25.566 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48224 Sep 22 23:22:25.566 INFO [2] client ExtentFlush { repair_id: ReconciliationId(584), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48225 Sep 22 23:22:25.566 DEBG 584 Flush extent 0 with f:2 g:2
48226 Sep 22 23:22:25.566 DEBG Flush just extent 0 with f:2 and g:2
48227 Sep 22 23:22:25.566 DEBG [1] It's time to notify for 584
48228 Sep 22 23:22:25.566 INFO Completion from [1] id:584 status:true
48229 Sep 22 23:22:25.566 INFO [585/752] Repair commands completed
48230 Sep 22 23:22:25.566 INFO Pop front: ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }, state: ClientData([New, New, New]) }
48231 Sep 22 23:22:25.566 INFO Sent repair work, now wait for resp
48232 Sep 22 23:22:25.566 INFO [0] received reconcile message
48233 Sep 22 23:22:25.566 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }, state: ClientData([InProgress, New, New]) }, : downstairs
48234 Sep 22 23:22:25.566 INFO [0] client ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }
48235 Sep 22 23:22:25.566 INFO [1] received reconcile message
48236 Sep 22 23:22:25.566 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48237 Sep 22 23:22:25.566 INFO [1] client ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }
48238 Sep 22 23:22:25.566 INFO [2] received reconcile message
48239 Sep 22 23:22:25.566 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48240 Sep 22 23:22:25.566 INFO [2] client ExtentClose { repair_id: ReconciliationId(585), extent_id: 0 }
48241 Sep 22 23:22:25.567 DEBG 585 Close extent 0
48242 Sep 22 23:22:25.567 DEBG 585 Close extent 0
48243 Sep 22 23:22:25.567 DEBG 585 Close extent 0
48244 Sep 22 23:22:25.568 DEBG [2] It's time to notify for 585
48245 Sep 22 23:22:25.568 INFO Completion from [2] id:585 status:true
48246 Sep 22 23:22:25.568 INFO [586/752] Repair commands completed
48247 Sep 22 23:22:25.568 INFO Pop front: ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48248 Sep 22 23:22:25.568 INFO Sent repair work, now wait for resp
48249 Sep 22 23:22:25.568 INFO [0] received reconcile message
48250 Sep 22 23:22:25.568 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48251 Sep 22 23:22:25.568 INFO [0] client ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48252 Sep 22 23:22:25.568 INFO [0] Sending repair request ReconciliationId(586)
48253 Sep 22 23:22:25.568 INFO [1] received reconcile message
48254 Sep 22 23:22:25.568 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48255 Sep 22 23:22:25.568 INFO [1] client ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48256 Sep 22 23:22:25.568 INFO [1] No action required ReconciliationId(586)
48257 Sep 22 23:22:25.568 INFO [2] received reconcile message
48258 Sep 22 23:22:25.568 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48259 Sep 22 23:22:25.568 INFO [2] client ExtentRepair { repair_id: ReconciliationId(586), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48260 Sep 22 23:22:25.568 INFO [2] No action required ReconciliationId(586)
48261 Sep 22 23:22:25.568 DEBG 586 Repair extent 0 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48262 Sep 22 23:22:25.568 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/000.copy"
48263 Sep 22 23:22:25.621 DEBG up_ds_listen was notified
48264 Sep 22 23:22:25.621 DEBG up_ds_listen process 1090
48265 Sep 22 23:22:25.621 DEBG [A] ack job 1090:91, : downstairs
48266 Sep 22 23:22:25.621 DEBG up_ds_listen checked 1 jobs, back to waiting
48267 Sep 22 23:22:25.622 DEBG Flush :1086 extent_limit None deps:[JobId(1085), JobId(1084)] res:true f:32 g:1
48268 Sep 22 23:22:25.627 DEBG Read :1087 deps:[JobId(1086)] res:true
48269 Sep 22 23:22:25.632 INFO accepted connection, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48270 Sep 22 23:22:25.632 TRCE incoming request, uri: /extent/0/files, method: GET, req_id: 77e00068-f05b-45bb-b6c3-3b43650c38ea, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48271 Sep 22 23:22:25.632 INFO request completed, latency_us: 211, response_code: 200, uri: /extent/0/files, method: GET, req_id: 77e00068-f05b-45bb-b6c3-3b43650c38ea, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48272 Sep 22 23:22:25.633 INFO eid:0 Found repair files: ["000", "000.db"]
48273 Sep 22 23:22:25.633 TRCE incoming request, uri: /newextent/0/data, method: GET, req_id: 71903321-8833-4637-b215-d43b720fd67d, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48274 Sep 22 23:22:25.633 INFO request completed, latency_us: 332, response_code: 200, uri: /newextent/0/data, method: GET, req_id: 71903321-8833-4637-b215-d43b720fd67d, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48275 Sep 22 23:22:25.638 TRCE incoming request, uri: /newextent/0/db, method: GET, req_id: fab1d54a-5b31-4e75-aa06-247bea7c7bb3, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48276 Sep 22 23:22:25.639 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/0/db, method: GET, req_id: fab1d54a-5b31-4e75-aa06-247bea7c7bb3, remote_addr: 127.0.0.1:49566, local_addr: 127.0.0.1:52864, task: repair
48277 Sep 22 23:22:25.640 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/000.copy" to "/tmp/downstairs-zrMnlo6G/00/000/000.replace"
48278 Sep 22 23:22:25.640 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48279 Sep 22 23:22:25.641 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/000.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48280 Sep 22 23:22:25.641 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/000"
48281 Sep 22 23:22:25.641 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/000.db"
48282 Sep 22 23:22:25.641 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48283 Sep 22 23:22:25.641 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/000.replace" to "/tmp/downstairs-zrMnlo6G/00/000/000.completed"
48284 Sep 22 23:22:25.641 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48285 Sep 22 23:22:25.641 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48286 Sep 22 23:22:25.641 DEBG [0] It's time to notify for 586
48287 Sep 22 23:22:25.642 INFO Completion from [0] id:586 status:true
48288 Sep 22 23:22:25.642 INFO [587/752] Repair commands completed
48289 Sep 22 23:22:25.642 INFO Pop front: ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }, state: ClientData([New, New, New]) }
48290 Sep 22 23:22:25.642 INFO Sent repair work, now wait for resp
48291 Sep 22 23:22:25.642 INFO [0] received reconcile message
48292 Sep 22 23:22:25.642 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }, state: ClientData([InProgress, New, New]) }, : downstairs
48293 Sep 22 23:22:25.642 INFO [0] client ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }
48294 Sep 22 23:22:25.642 INFO [1] received reconcile message
48295 Sep 22 23:22:25.642 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48296 Sep 22 23:22:25.642 INFO [1] client ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }
48297 Sep 22 23:22:25.642 INFO [2] received reconcile message
48298 Sep 22 23:22:25.642 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48299 Sep 22 23:22:25.642 INFO [2] client ExtentReopen { repair_id: ReconciliationId(587), extent_id: 0 }
48300 Sep 22 23:22:25.642 DEBG 587 Reopen extent 0
48301 Sep 22 23:22:25.643 DEBG 587 Reopen extent 0
48302 Sep 22 23:22:25.643 DEBG 587 Reopen extent 0
48303 Sep 22 23:22:25.644 DEBG [2] It's time to notify for 587
48304 Sep 22 23:22:25.644 INFO Completion from [2] id:587 status:true
48305 Sep 22 23:22:25.644 INFO [588/752] Repair commands completed
48306 Sep 22 23:22:25.644 INFO Pop front: ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48307 Sep 22 23:22:25.644 INFO Sent repair work, now wait for resp
48308 Sep 22 23:22:25.644 INFO [0] received reconcile message
48309 Sep 22 23:22:25.644 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48310 Sep 22 23:22:25.644 INFO [0] client ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48311 Sep 22 23:22:25.644 INFO [1] received reconcile message
48312 Sep 22 23:22:25.644 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48313 Sep 22 23:22:25.644 INFO [1] client ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48314 Sep 22 23:22:25.644 INFO [2] received reconcile message
48315 Sep 22 23:22:25.644 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48316 Sep 22 23:22:25.644 INFO [2] client ExtentFlush { repair_id: ReconciliationId(588), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48317 Sep 22 23:22:25.644 DEBG 588 Flush extent 164 with f:2 g:2
48318 Sep 22 23:22:25.644 DEBG Flush just extent 164 with f:2 and g:2
48319 Sep 22 23:22:25.644 DEBG [1] It's time to notify for 588
48320 Sep 22 23:22:25.644 INFO Completion from [1] id:588 status:true
48321 Sep 22 23:22:25.644 INFO [589/752] Repair commands completed
48322 Sep 22 23:22:25.644 INFO Pop front: ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }, state: ClientData([New, New, New]) }
48323 Sep 22 23:22:25.644 INFO Sent repair work, now wait for resp
48324 Sep 22 23:22:25.644 INFO [0] received reconcile message
48325 Sep 22 23:22:25.644 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }, state: ClientData([InProgress, New, New]) }, : downstairs
48326 Sep 22 23:22:25.644 INFO [0] client ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }
48327 Sep 22 23:22:25.645 INFO [1] received reconcile message
48328 Sep 22 23:22:25.645 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48329 Sep 22 23:22:25.645 INFO [1] client ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }
48330 Sep 22 23:22:25.645 INFO [2] received reconcile message
48331 Sep 22 23:22:25.645 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48332 Sep 22 23:22:25.645 INFO [2] client ExtentClose { repair_id: ReconciliationId(589), extent_id: 164 }
48333 Sep 22 23:22:25.645 DEBG 589 Close extent 164
48334 Sep 22 23:22:25.645 DEBG 589 Close extent 164
48335 Sep 22 23:22:25.645 DEBG 589 Close extent 164
48336 Sep 22 23:22:25.646 DEBG [2] It's time to notify for 589
48337 Sep 22 23:22:25.646 INFO Completion from [2] id:589 status:true
48338 Sep 22 23:22:25.646 INFO [590/752] Repair commands completed
48339 Sep 22 23:22:25.646 INFO Pop front: ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48340 Sep 22 23:22:25.646 INFO Sent repair work, now wait for resp
48341 Sep 22 23:22:25.646 INFO [0] received reconcile message
48342 Sep 22 23:22:25.646 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48343 Sep 22 23:22:25.646 INFO [0] client ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48344 Sep 22 23:22:25.646 INFO [0] Sending repair request ReconciliationId(590)
48345 Sep 22 23:22:25.646 INFO [1] received reconcile message
48346 Sep 22 23:22:25.646 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48347 Sep 22 23:22:25.646 INFO [1] client ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48348 Sep 22 23:22:25.646 INFO [1] No action required ReconciliationId(590)
48349 Sep 22 23:22:25.646 INFO [2] received reconcile message
48350 Sep 22 23:22:25.646 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48351 Sep 22 23:22:25.646 INFO [2] client ExtentRepair { repair_id: ReconciliationId(590), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48352 Sep 22 23:22:25.646 INFO [2] No action required ReconciliationId(590)
48353 Sep 22 23:22:25.646 DEBG 590 Repair extent 164 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48354 Sep 22 23:22:25.646 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0A4.copy"
48355 Sep 22 23:22:25.649 DEBG [rc] retire 1086 clears [JobId(1085), JobId(1086)], : downstairs
48356 Sep 22 23:22:25.652 INFO [lossy] sleeping 1 second
48357 Sep 22 23:22:25.712 INFO accepted connection, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48358 Sep 22 23:22:25.712 TRCE incoming request, uri: /extent/164/files, method: GET, req_id: 1385ac13-c900-4093-a2ec-51bea160704d, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48359 Sep 22 23:22:25.712 INFO request completed, latency_us: 210, response_code: 200, uri: /extent/164/files, method: GET, req_id: 1385ac13-c900-4093-a2ec-51bea160704d, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48360 Sep 22 23:22:25.713 INFO eid:164 Found repair files: ["0A4", "0A4.db"]
48361 Sep 22 23:22:25.713 TRCE incoming request, uri: /newextent/164/data, method: GET, req_id: 9e2524b9-95d3-48cd-aa53-0a4e4fb2f974, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48362 Sep 22 23:22:25.713 INFO request completed, latency_us: 327, response_code: 200, uri: /newextent/164/data, method: GET, req_id: 9e2524b9-95d3-48cd-aa53-0a4e4fb2f974, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48363 Sep 22 23:22:25.718 TRCE incoming request, uri: /newextent/164/db, method: GET, req_id: 80cb5eb5-5e70-40ac-9638-f5b782327418, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48364 Sep 22 23:22:25.718 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/164/db, method: GET, req_id: 80cb5eb5-5e70-40ac-9638-f5b782327418, remote_addr: 127.0.0.1:64591, local_addr: 127.0.0.1:52864, task: repair
48365 Sep 22 23:22:25.720 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0A4.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0A4.replace"
48366 Sep 22 23:22:25.720 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48367 Sep 22 23:22:25.720 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0A4.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48368 Sep 22 23:22:25.721 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A4"
48369 Sep 22 23:22:25.721 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0A4.db"
48370 Sep 22 23:22:25.721 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48371 Sep 22 23:22:25.721 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0A4.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0A4.completed"
48372 Sep 22 23:22:25.721 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48373 Sep 22 23:22:25.721 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48374 Sep 22 23:22:25.721 DEBG [0] It's time to notify for 590
48375 Sep 22 23:22:25.721 INFO Completion from [0] id:590 status:true
48376 Sep 22 23:22:25.721 INFO [591/752] Repair commands completed
48377 Sep 22 23:22:25.721 INFO Pop front: ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }, state: ClientData([New, New, New]) }
48378 Sep 22 23:22:25.721 INFO Sent repair work, now wait for resp
48379 Sep 22 23:22:25.721 INFO [0] received reconcile message
48380 Sep 22 23:22:25.721 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }, state: ClientData([InProgress, New, New]) }, : downstairs
48381 Sep 22 23:22:25.721 INFO [0] client ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }
48382 Sep 22 23:22:25.721 INFO [1] received reconcile message
48383 Sep 22 23:22:25.721 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48384 Sep 22 23:22:25.721 INFO [1] client ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }
48385 Sep 22 23:22:25.721 INFO [2] received reconcile message
48386 Sep 22 23:22:25.722 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48387 Sep 22 23:22:25.722 INFO [2] client ExtentReopen { repair_id: ReconciliationId(591), extent_id: 164 }
48388 Sep 22 23:22:25.722 DEBG 591 Reopen extent 164
48389 Sep 22 23:22:25.722 DEBG 591 Reopen extent 164
48390 Sep 22 23:22:25.723 DEBG 591 Reopen extent 164
48391 Sep 22 23:22:25.723 DEBG [2] It's time to notify for 591
48392 Sep 22 23:22:25.723 INFO Completion from [2] id:591 status:true
48393 Sep 22 23:22:25.723 INFO [592/752] Repair commands completed
48394 Sep 22 23:22:25.723 INFO Pop front: ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48395 Sep 22 23:22:25.723 INFO Sent repair work, now wait for resp
48396 Sep 22 23:22:25.724 INFO [0] received reconcile message
48397 Sep 22 23:22:25.724 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48398 Sep 22 23:22:25.724 INFO [0] client ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48399 Sep 22 23:22:25.724 INFO [1] received reconcile message
48400 Sep 22 23:22:25.724 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48401 Sep 22 23:22:25.724 INFO [1] client ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48402 Sep 22 23:22:25.724 INFO [2] received reconcile message
48403 Sep 22 23:22:25.724 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48404 Sep 22 23:22:25.724 INFO [2] client ExtentFlush { repair_id: ReconciliationId(592), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48405 Sep 22 23:22:25.724 DEBG 592 Flush extent 24 with f:2 g:2
48406 Sep 22 23:22:25.724 DEBG Flush just extent 24 with f:2 and g:2
48407 Sep 22 23:22:25.724 DEBG [1] It's time to notify for 592
48408 Sep 22 23:22:25.724 INFO Completion from [1] id:592 status:true
48409 Sep 22 23:22:25.724 INFO [593/752] Repair commands completed
48410 Sep 22 23:22:25.724 INFO Pop front: ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }, state: ClientData([New, New, New]) }
48411 Sep 22 23:22:25.724 INFO Sent repair work, now wait for resp
48412 Sep 22 23:22:25.724 INFO [0] received reconcile message
48413 Sep 22 23:22:25.724 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }, state: ClientData([InProgress, New, New]) }, : downstairs
48414 Sep 22 23:22:25.724 INFO [0] client ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }
48415 Sep 22 23:22:25.724 INFO [1] received reconcile message
48416 Sep 22 23:22:25.724 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48417 Sep 22 23:22:25.724 INFO [1] client ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }
48418 Sep 22 23:22:25.724 INFO [2] received reconcile message
48419 Sep 22 23:22:25.724 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48420 Sep 22 23:22:25.724 INFO [2] client ExtentClose { repair_id: ReconciliationId(593), extent_id: 24 }
48421 Sep 22 23:22:25.724 DEBG 593 Close extent 24
48422 Sep 22 23:22:25.725 DEBG 593 Close extent 24
48423 Sep 22 23:22:25.725 DEBG 593 Close extent 24
48424 Sep 22 23:22:25.725 DEBG [2] It's time to notify for 593
48425 Sep 22 23:22:25.725 INFO Completion from [2] id:593 status:true
48426 Sep 22 23:22:25.725 INFO [594/752] Repair commands completed
48427 Sep 22 23:22:25.725 INFO Pop front: ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48428 Sep 22 23:22:25.725 INFO Sent repair work, now wait for resp
48429 Sep 22 23:22:25.726 INFO [0] received reconcile message
48430 Sep 22 23:22:25.726 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48431 Sep 22 23:22:25.726 INFO [0] client ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48432 Sep 22 23:22:25.726 INFO [0] Sending repair request ReconciliationId(594)
48433 Sep 22 23:22:25.726 INFO [1] received reconcile message
48434 Sep 22 23:22:25.726 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48435 Sep 22 23:22:25.726 INFO [1] client ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48436 Sep 22 23:22:25.726 INFO [1] No action required ReconciliationId(594)
48437 Sep 22 23:22:25.726 INFO [2] received reconcile message
48438 Sep 22 23:22:25.726 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48439 Sep 22 23:22:25.726 INFO [2] client ExtentRepair { repair_id: ReconciliationId(594), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48440 Sep 22 23:22:25.726 INFO [2] No action required ReconciliationId(594)
48441 Sep 22 23:22:25.726 DEBG 594 Repair extent 24 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48442 Sep 22 23:22:25.726 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/018.copy"
48443 Sep 22 23:22:25.790 INFO accepted connection, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48444 Sep 22 23:22:25.791 TRCE incoming request, uri: /extent/24/files, method: GET, req_id: e9a2a2d9-0228-479d-a1a1-69fbc26cc87d, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48445 Sep 22 23:22:25.791 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/24/files, method: GET, req_id: e9a2a2d9-0228-479d-a1a1-69fbc26cc87d, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48446 Sep 22 23:22:25.791 INFO eid:24 Found repair files: ["018", "018.db"]
48447 Sep 22 23:22:25.791 TRCE incoming request, uri: /newextent/24/data, method: GET, req_id: 7e565e8c-534b-4d1d-ab37-22d39760078c, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48448 Sep 22 23:22:25.792 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/24/data, method: GET, req_id: 7e565e8c-534b-4d1d-ab37-22d39760078c, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48449 Sep 22 23:22:25.796 TRCE incoming request, uri: /newextent/24/db, method: GET, req_id: b096e23f-727c-4d77-a7cc-7b67c7ddc61a, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48450 Sep 22 23:22:25.797 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/24/db, method: GET, req_id: b096e23f-727c-4d77-a7cc-7b67c7ddc61a, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:52864, task: repair
48451 Sep 22 23:22:25.798 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/018.copy" to "/tmp/downstairs-zrMnlo6G/00/000/018.replace"
48452 Sep 22 23:22:25.798 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48453 Sep 22 23:22:25.799 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/018.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48454 Sep 22 23:22:25.799 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/018"
48455 Sep 22 23:22:25.799 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/018.db"
48456 Sep 22 23:22:25.799 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48457 Sep 22 23:22:25.799 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/018.replace" to "/tmp/downstairs-zrMnlo6G/00/000/018.completed"
48458 Sep 22 23:22:25.799 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48459 Sep 22 23:22:25.799 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48460 Sep 22 23:22:25.799 DEBG [0] It's time to notify for 594
48461 Sep 22 23:22:25.799 INFO Completion from [0] id:594 status:true
48462 Sep 22 23:22:25.799 INFO [595/752] Repair commands completed
48463 Sep 22 23:22:25.799 INFO Pop front: ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }, state: ClientData([New, New, New]) }
48464 Sep 22 23:22:25.799 INFO Sent repair work, now wait for resp
48465 Sep 22 23:22:25.800 INFO [0] received reconcile message
48466 Sep 22 23:22:25.800 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }, state: ClientData([InProgress, New, New]) }, : downstairs
48467 Sep 22 23:22:25.800 INFO [0] client ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }
48468 Sep 22 23:22:25.800 INFO [1] received reconcile message
48469 Sep 22 23:22:25.800 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48470 Sep 22 23:22:25.800 INFO [1] client ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }
48471 Sep 22 23:22:25.800 INFO [2] received reconcile message
48472 Sep 22 23:22:25.800 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48473 Sep 22 23:22:25.800 INFO [2] client ExtentReopen { repair_id: ReconciliationId(595), extent_id: 24 }
48474 Sep 22 23:22:25.800 DEBG 595 Reopen extent 24
48475 Sep 22 23:22:25.800 DEBG 595 Reopen extent 24
48476 Sep 22 23:22:25.801 DEBG 595 Reopen extent 24
48477 Sep 22 23:22:25.802 DEBG [2] It's time to notify for 595
48478 Sep 22 23:22:25.802 INFO Completion from [2] id:595 status:true
48479 Sep 22 23:22:25.802 INFO [596/752] Repair commands completed
48480 Sep 22 23:22:25.802 INFO Pop front: ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48481 Sep 22 23:22:25.802 INFO Sent repair work, now wait for resp
48482 Sep 22 23:22:25.802 INFO [0] received reconcile message
48483 Sep 22 23:22:25.802 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48484 Sep 22 23:22:25.802 INFO [0] client ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48485 Sep 22 23:22:25.802 INFO [1] received reconcile message
48486 Sep 22 23:22:25.802 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48487 Sep 22 23:22:25.802 INFO [1] client ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48488 Sep 22 23:22:25.802 INFO [2] received reconcile message
48489 Sep 22 23:22:25.802 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48490 Sep 22 23:22:25.802 INFO [2] client ExtentFlush { repair_id: ReconciliationId(596), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48491 Sep 22 23:22:25.802 DEBG 596 Flush extent 39 with f:2 g:2
48492 Sep 22 23:22:25.802 DEBG Flush just extent 39 with f:2 and g:2
48493 Sep 22 23:22:25.802 DEBG [1] It's time to notify for 596
48494 Sep 22 23:22:25.802 INFO Completion from [1] id:596 status:true
48495 Sep 22 23:22:25.802 INFO [597/752] Repair commands completed
48496 Sep 22 23:22:25.802 INFO Pop front: ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }, state: ClientData([New, New, New]) }
48497 Sep 22 23:22:25.802 INFO Sent repair work, now wait for resp
48498 Sep 22 23:22:25.802 INFO [0] received reconcile message
48499 Sep 22 23:22:25.802 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }, state: ClientData([InProgress, New, New]) }, : downstairs
48500 Sep 22 23:22:25.802 INFO [0] client ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }
48501 Sep 22 23:22:25.802 INFO [1] received reconcile message
48502 Sep 22 23:22:25.802 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48503 Sep 22 23:22:25.802 INFO [1] client ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }
48504 Sep 22 23:22:25.802 INFO [2] received reconcile message
48505 Sep 22 23:22:25.802 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48506 Sep 22 23:22:25.802 INFO [2] client ExtentClose { repair_id: ReconciliationId(597), extent_id: 39 }
48507 Sep 22 23:22:25.803 DEBG 597 Close extent 39
48508 Sep 22 23:22:25.803 DEBG 597 Close extent 39
48509 Sep 22 23:22:25.803 DEBG 597 Close extent 39
48510 Sep 22 23:22:25.804 DEBG [2] It's time to notify for 597
48511 Sep 22 23:22:25.804 INFO Completion from [2] id:597 status:true
48512 Sep 22 23:22:25.804 INFO [598/752] Repair commands completed
48513 Sep 22 23:22:25.804 INFO Pop front: ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48514 Sep 22 23:22:25.804 INFO Sent repair work, now wait for resp
48515 Sep 22 23:22:25.804 INFO [0] received reconcile message
48516 Sep 22 23:22:25.804 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48517 Sep 22 23:22:25.804 INFO [0] client ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48518 Sep 22 23:22:25.804 INFO [0] Sending repair request ReconciliationId(598)
48519 Sep 22 23:22:25.804 INFO [1] received reconcile message
48520 Sep 22 23:22:25.804 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48521 Sep 22 23:22:25.804 INFO [1] client ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48522 Sep 22 23:22:25.804 INFO [1] No action required ReconciliationId(598)
48523 Sep 22 23:22:25.804 INFO [2] received reconcile message
48524 Sep 22 23:22:25.804 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48525 Sep 22 23:22:25.804 INFO [2] client ExtentRepair { repair_id: ReconciliationId(598), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48526 Sep 22 23:22:25.804 INFO [2] No action required ReconciliationId(598)
48527 Sep 22 23:22:25.804 DEBG 598 Repair extent 39 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48528 Sep 22 23:22:25.804 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/027.copy"
48529 Sep 22 23:22:25.869 INFO accepted connection, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48530 Sep 22 23:22:25.869 TRCE incoming request, uri: /extent/39/files, method: GET, req_id: b8a74093-007e-4eb5-9e9f-66d132ff9366, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48531 Sep 22 23:22:25.869 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/39/files, method: GET, req_id: b8a74093-007e-4eb5-9e9f-66d132ff9366, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48532 Sep 22 23:22:25.870 INFO eid:39 Found repair files: ["027", "027.db"]
48533 Sep 22 23:22:25.870 TRCE incoming request, uri: /newextent/39/data, method: GET, req_id: 407df6bc-f6d0-4f07-805a-0c90f1c13280, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48534 Sep 22 23:22:25.870 INFO request completed, latency_us: 327, response_code: 200, uri: /newextent/39/data, method: GET, req_id: 407df6bc-f6d0-4f07-805a-0c90f1c13280, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48535 Sep 22 23:22:25.875 TRCE incoming request, uri: /newextent/39/db, method: GET, req_id: a3e140db-4494-4a2c-b644-3e5892e628c3, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48536 Sep 22 23:22:25.875 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/39/db, method: GET, req_id: a3e140db-4494-4a2c-b644-3e5892e628c3, remote_addr: 127.0.0.1:50542, local_addr: 127.0.0.1:52864, task: repair
48537 Sep 22 23:22:25.876 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/027.copy" to "/tmp/downstairs-zrMnlo6G/00/000/027.replace"
48538 Sep 22 23:22:25.876 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48539 Sep 22 23:22:25.877 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/027.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48540 Sep 22 23:22:25.877 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/027"
48541 Sep 22 23:22:25.877 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/027.db"
48542 Sep 22 23:22:25.877 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48543 Sep 22 23:22:25.877 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/027.replace" to "/tmp/downstairs-zrMnlo6G/00/000/027.completed"
48544 Sep 22 23:22:25.877 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48545 Sep 22 23:22:25.878 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48546 Sep 22 23:22:25.878 DEBG [0] It's time to notify for 598
48547 Sep 22 23:22:25.878 INFO Completion from [0] id:598 status:true
48548 Sep 22 23:22:25.878 INFO [599/752] Repair commands completed
48549 Sep 22 23:22:25.878 INFO Pop front: ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }, state: ClientData([New, New, New]) }
48550 Sep 22 23:22:25.878 INFO Sent repair work, now wait for resp
48551 Sep 22 23:22:25.878 INFO [0] received reconcile message
48552 Sep 22 23:22:25.878 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }, state: ClientData([InProgress, New, New]) }, : downstairs
48553 Sep 22 23:22:25.878 INFO [0] client ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }
48554 Sep 22 23:22:25.878 INFO [1] received reconcile message
48555 Sep 22 23:22:25.878 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48556 Sep 22 23:22:25.878 INFO [1] client ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }
48557 Sep 22 23:22:25.878 INFO [2] received reconcile message
48558 Sep 22 23:22:25.878 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48559 Sep 22 23:22:25.878 INFO [2] client ExtentReopen { repair_id: ReconciliationId(599), extent_id: 39 }
48560 Sep 22 23:22:25.878 DEBG 599 Reopen extent 39
48561 Sep 22 23:22:25.879 DEBG 599 Reopen extent 39
48562 Sep 22 23:22:25.879 DEBG 599 Reopen extent 39
48563 Sep 22 23:22:25.880 DEBG [2] It's time to notify for 599
48564 Sep 22 23:22:25.880 INFO Completion from [2] id:599 status:true
48565 Sep 22 23:22:25.880 INFO [600/752] Repair commands completed
48566 Sep 22 23:22:25.880 INFO Pop front: ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48567 Sep 22 23:22:25.880 INFO Sent repair work, now wait for resp
48568 Sep 22 23:22:25.880 INFO [0] received reconcile message
48569 Sep 22 23:22:25.880 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48570 Sep 22 23:22:25.880 INFO [0] client ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48571 Sep 22 23:22:25.880 INFO [1] received reconcile message
48572 Sep 22 23:22:25.880 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48573 Sep 22 23:22:25.880 INFO [1] client ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48574 Sep 22 23:22:25.880 INFO [2] received reconcile message
48575 Sep 22 23:22:25.880 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48576 Sep 22 23:22:25.880 INFO [2] client ExtentFlush { repair_id: ReconciliationId(600), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48577 Sep 22 23:22:25.880 DEBG 600 Flush extent 48 with f:2 g:2
48578 Sep 22 23:22:25.880 DEBG Flush just extent 48 with f:2 and g:2
48579 Sep 22 23:22:25.881 DEBG [1] It's time to notify for 600
48580 Sep 22 23:22:25.881 INFO Completion from [1] id:600 status:true
48581 Sep 22 23:22:25.881 INFO [601/752] Repair commands completed
48582 Sep 22 23:22:25.881 INFO Pop front: ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }, state: ClientData([New, New, New]) }
48583 Sep 22 23:22:25.881 INFO Sent repair work, now wait for resp
48584 Sep 22 23:22:25.881 INFO [0] received reconcile message
48585 Sep 22 23:22:25.881 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }, state: ClientData([InProgress, New, New]) }, : downstairs
48586 Sep 22 23:22:25.881 INFO [0] client ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }
48587 Sep 22 23:22:25.881 INFO [1] received reconcile message
48588 Sep 22 23:22:25.881 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48589 Sep 22 23:22:25.881 INFO [1] client ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }
48590 Sep 22 23:22:25.881 INFO [2] received reconcile message
48591 Sep 22 23:22:25.881 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48592 Sep 22 23:22:25.881 INFO [2] client ExtentClose { repair_id: ReconciliationId(601), extent_id: 48 }
48593 Sep 22 23:22:25.881 DEBG 601 Close extent 48
48594 Sep 22 23:22:25.881 DEBG 601 Close extent 48
48595 Sep 22 23:22:25.882 DEBG 601 Close extent 48
48596 Sep 22 23:22:25.882 DEBG [2] It's time to notify for 601
48597 Sep 22 23:22:25.882 INFO Completion from [2] id:601 status:true
48598 Sep 22 23:22:25.882 INFO [602/752] Repair commands completed
48599 Sep 22 23:22:25.882 INFO Pop front: ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48600 Sep 22 23:22:25.882 INFO Sent repair work, now wait for resp
48601 Sep 22 23:22:25.882 INFO [0] received reconcile message
48602 Sep 22 23:22:25.882 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48603 Sep 22 23:22:25.882 INFO [0] client ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48604 Sep 22 23:22:25.882 INFO [0] Sending repair request ReconciliationId(602)
48605 Sep 22 23:22:25.882 INFO [1] received reconcile message
48606 Sep 22 23:22:25.882 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48607 Sep 22 23:22:25.882 INFO [1] client ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48608 Sep 22 23:22:25.882 INFO [1] No action required ReconciliationId(602)
48609 Sep 22 23:22:25.882 INFO [2] received reconcile message
48610 Sep 22 23:22:25.882 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48611 Sep 22 23:22:25.882 INFO [2] client ExtentRepair { repair_id: ReconciliationId(602), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48612 Sep 22 23:22:25.882 INFO [2] No action required ReconciliationId(602)
48613 Sep 22 23:22:25.883 DEBG 602 Repair extent 48 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48614 Sep 22 23:22:25.883 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/030.copy"
48615 Sep 22 23:22:25.946 INFO accepted connection, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48616 Sep 22 23:22:25.946 TRCE incoming request, uri: /extent/48/files, method: GET, req_id: 5e3137d7-a847-43fc-bc05-a938a8c698e3, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48617 Sep 22 23:22:25.946 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/48/files, method: GET, req_id: 5e3137d7-a847-43fc-bc05-a938a8c698e3, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48618 Sep 22 23:22:25.946 INFO eid:48 Found repair files: ["030", "030.db"]
48619 Sep 22 23:22:25.947 TRCE incoming request, uri: /newextent/48/data, method: GET, req_id: 2fc3a3f7-c9bd-4aaa-9642-3d90b4e5c783, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48620 Sep 22 23:22:25.947 INFO request completed, latency_us: 257, response_code: 200, uri: /newextent/48/data, method: GET, req_id: 2fc3a3f7-c9bd-4aaa-9642-3d90b4e5c783, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48621 Sep 22 23:22:25.952 TRCE incoming request, uri: /newextent/48/db, method: GET, req_id: d5972454-a3b3-45c4-b299-a8c0a6668dea, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48622 Sep 22 23:22:25.952 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/48/db, method: GET, req_id: d5972454-a3b3-45c4-b299-a8c0a6668dea, remote_addr: 127.0.0.1:41819, local_addr: 127.0.0.1:52864, task: repair
48623 Sep 22 23:22:25.953 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/030.copy" to "/tmp/downstairs-zrMnlo6G/00/000/030.replace"
48624 Sep 22 23:22:25.953 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48625 Sep 22 23:22:25.954 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/030.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48626 Sep 22 23:22:25.954 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/030"
48627 Sep 22 23:22:25.954 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/030.db"
48628 Sep 22 23:22:25.954 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48629 Sep 22 23:22:25.954 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/030.replace" to "/tmp/downstairs-zrMnlo6G/00/000/030.completed"
48630 Sep 22 23:22:25.954 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48631 Sep 22 23:22:25.954 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48632 Sep 22 23:22:25.954 DEBG [0] It's time to notify for 602
48633 Sep 22 23:22:25.955 INFO Completion from [0] id:602 status:true
48634 Sep 22 23:22:25.955 INFO [603/752] Repair commands completed
48635 Sep 22 23:22:25.955 INFO Pop front: ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }, state: ClientData([New, New, New]) }
48636 Sep 22 23:22:25.955 INFO Sent repair work, now wait for resp
48637 Sep 22 23:22:25.955 INFO [0] received reconcile message
48638 Sep 22 23:22:25.955 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }, state: ClientData([InProgress, New, New]) }, : downstairs
48639 Sep 22 23:22:25.955 INFO [0] client ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }
48640 Sep 22 23:22:25.955 INFO [1] received reconcile message
48641 Sep 22 23:22:25.955 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48642 Sep 22 23:22:25.955 INFO [1] client ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }
48643 Sep 22 23:22:25.955 INFO [2] received reconcile message
48644 Sep 22 23:22:25.955 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48645 Sep 22 23:22:25.955 INFO [2] client ExtentReopen { repair_id: ReconciliationId(603), extent_id: 48 }
48646 Sep 22 23:22:25.955 DEBG 603 Reopen extent 48
48647 Sep 22 23:22:25.956 DEBG 603 Reopen extent 48
48648 Sep 22 23:22:25.956 DEBG 603 Reopen extent 48
48649 Sep 22 23:22:25.957 DEBG [2] It's time to notify for 603
48650 Sep 22 23:22:25.957 INFO Completion from [2] id:603 status:true
48651 Sep 22 23:22:25.957 INFO [604/752] Repair commands completed
48652 Sep 22 23:22:25.957 INFO Pop front: ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48653 Sep 22 23:22:25.957 INFO Sent repair work, now wait for resp
48654 Sep 22 23:22:25.957 INFO [0] received reconcile message
48655 Sep 22 23:22:25.957 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48656 Sep 22 23:22:25.957 INFO [0] client ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48657 Sep 22 23:22:25.957 INFO [1] received reconcile message
48658 Sep 22 23:22:25.957 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48659 Sep 22 23:22:25.957 INFO [1] client ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48660 Sep 22 23:22:25.957 INFO [2] received reconcile message
48661 Sep 22 23:22:25.957 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48662 Sep 22 23:22:25.957 INFO [2] client ExtentFlush { repair_id: ReconciliationId(604), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48663 Sep 22 23:22:25.957 DEBG 604 Flush extent 16 with f:2 g:2
48664 Sep 22 23:22:25.957 DEBG Flush just extent 16 with f:2 and g:2
48665 Sep 22 23:22:25.957 DEBG [1] It's time to notify for 604
48666 Sep 22 23:22:25.957 INFO Completion from [1] id:604 status:true
48667 Sep 22 23:22:25.957 INFO [605/752] Repair commands completed
48668 Sep 22 23:22:25.957 INFO Pop front: ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }, state: ClientData([New, New, New]) }
48669 Sep 22 23:22:25.957 INFO Sent repair work, now wait for resp
48670 Sep 22 23:22:25.957 INFO [0] received reconcile message
48671 Sep 22 23:22:25.957 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }, state: ClientData([InProgress, New, New]) }, : downstairs
48672 Sep 22 23:22:25.957 INFO [0] client ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }
48673 Sep 22 23:22:25.957 INFO [1] received reconcile message
48674 Sep 22 23:22:25.958 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48675 Sep 22 23:22:25.958 INFO [1] client ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }
48676 Sep 22 23:22:25.958 INFO [2] received reconcile message
48677 Sep 22 23:22:25.958 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48678 Sep 22 23:22:25.958 INFO [2] client ExtentClose { repair_id: ReconciliationId(605), extent_id: 16 }
48679 Sep 22 23:22:25.958 DEBG 605 Close extent 16
48680 Sep 22 23:22:25.958 DEBG 605 Close extent 16
48681 Sep 22 23:22:25.958 DEBG 605 Close extent 16
48682 Sep 22 23:22:25.959 DEBG [2] It's time to notify for 605
48683 Sep 22 23:22:25.959 INFO Completion from [2] id:605 status:true
48684 Sep 22 23:22:25.959 INFO [606/752] Repair commands completed
48685 Sep 22 23:22:25.959 INFO Pop front: ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48686 Sep 22 23:22:25.959 INFO Sent repair work, now wait for resp
48687 Sep 22 23:22:25.959 INFO [0] received reconcile message
48688 Sep 22 23:22:25.959 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48689 Sep 22 23:22:25.959 INFO [0] client ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48690 Sep 22 23:22:25.959 INFO [0] Sending repair request ReconciliationId(606)
48691 Sep 22 23:22:25.959 INFO [1] received reconcile message
48692 Sep 22 23:22:25.959 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48693 Sep 22 23:22:25.959 INFO [1] client ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48694 Sep 22 23:22:25.959 INFO [1] No action required ReconciliationId(606)
48695 Sep 22 23:22:25.959 INFO [2] received reconcile message
48696 Sep 22 23:22:25.959 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48697 Sep 22 23:22:25.959 INFO [2] client ExtentRepair { repair_id: ReconciliationId(606), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48698 Sep 22 23:22:25.959 INFO [2] No action required ReconciliationId(606)
48699 Sep 22 23:22:25.959 DEBG 606 Repair extent 16 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48700 Sep 22 23:22:25.959 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/010.copy"
48701 Sep 22 23:22:26.025 INFO accepted connection, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48702 Sep 22 23:22:26.025 TRCE incoming request, uri: /extent/16/files, method: GET, req_id: dd9029d7-5cf8-47a7-81f0-72cb97a746a5, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48703 Sep 22 23:22:26.026 INFO request completed, latency_us: 245, response_code: 200, uri: /extent/16/files, method: GET, req_id: dd9029d7-5cf8-47a7-81f0-72cb97a746a5, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48704 Sep 22 23:22:26.026 INFO eid:16 Found repair files: ["010", "010.db"]
48705 Sep 22 23:22:26.026 TRCE incoming request, uri: /newextent/16/data, method: GET, req_id: e16b43fc-6511-44f8-9f3c-dca5d3b41908, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48706 Sep 22 23:22:26.027 INFO request completed, latency_us: 336, response_code: 200, uri: /newextent/16/data, method: GET, req_id: e16b43fc-6511-44f8-9f3c-dca5d3b41908, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48707 Sep 22 23:22:26.029 DEBG [0] Read AckReady 1091, : downstairs
48708 Sep 22 23:22:26.030 DEBG up_ds_listen was notified
48709 Sep 22 23:22:26.030 DEBG up_ds_listen process 1091
48710 Sep 22 23:22:26.030 DEBG [A] ack job 1091:92, : downstairs
48711 Sep 22 23:22:26.031 TRCE incoming request, uri: /newextent/16/db, method: GET, req_id: d9385f59-6676-4a50-8fad-30f09f1d4ccf, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48712 Sep 22 23:22:26.032 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/16/db, method: GET, req_id: d9385f59-6676-4a50-8fad-30f09f1d4ccf, remote_addr: 127.0.0.1:50664, local_addr: 127.0.0.1:52864, task: repair
48713 Sep 22 23:22:26.033 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/010.copy" to "/tmp/downstairs-zrMnlo6G/00/000/010.replace"
48714 Sep 22 23:22:26.033 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48715 Sep 22 23:22:26.034 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/010.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48716 Sep 22 23:22:26.034 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/010"
48717 Sep 22 23:22:26.034 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/010.db"
48718 Sep 22 23:22:26.034 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48719 Sep 22 23:22:26.034 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/010.replace" to "/tmp/downstairs-zrMnlo6G/00/000/010.completed"
48720 Sep 22 23:22:26.034 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48721 Sep 22 23:22:26.034 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48722 Sep 22 23:22:26.035 DEBG [0] It's time to notify for 606
48723 Sep 22 23:22:26.035 INFO Completion from [0] id:606 status:true
48724 Sep 22 23:22:26.035 INFO [607/752] Repair commands completed
48725 Sep 22 23:22:26.035 INFO Pop front: ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }, state: ClientData([New, New, New]) }
48726 Sep 22 23:22:26.035 INFO Sent repair work, now wait for resp
48727 Sep 22 23:22:26.035 INFO [0] received reconcile message
48728 Sep 22 23:22:26.035 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }, state: ClientData([InProgress, New, New]) }, : downstairs
48729 Sep 22 23:22:26.035 INFO [0] client ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }
48730 Sep 22 23:22:26.035 INFO [1] received reconcile message
48731 Sep 22 23:22:26.035 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48732 Sep 22 23:22:26.035 INFO [1] client ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }
48733 Sep 22 23:22:26.035 INFO [2] received reconcile message
48734 Sep 22 23:22:26.035 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48735 Sep 22 23:22:26.035 INFO [2] client ExtentReopen { repair_id: ReconciliationId(607), extent_id: 16 }
48736 Sep 22 23:22:26.035 DEBG 607 Reopen extent 16
48737 Sep 22 23:22:26.036 DEBG 607 Reopen extent 16
48738 Sep 22 23:22:26.036 DEBG 607 Reopen extent 16
48739 Sep 22 23:22:26.037 DEBG [2] It's time to notify for 607
48740 Sep 22 23:22:26.037 INFO Completion from [2] id:607 status:true
48741 Sep 22 23:22:26.037 INFO [608/752] Repair commands completed
48742 Sep 22 23:22:26.037 INFO Pop front: ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48743 Sep 22 23:22:26.037 INFO Sent repair work, now wait for resp
48744 Sep 22 23:22:26.037 INFO [0] received reconcile message
48745 Sep 22 23:22:26.037 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48746 Sep 22 23:22:26.037 INFO [0] client ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48747 Sep 22 23:22:26.037 INFO [1] received reconcile message
48748 Sep 22 23:22:26.037 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48749 Sep 22 23:22:26.037 INFO [1] client ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48750 Sep 22 23:22:26.037 INFO [2] received reconcile message
48751 Sep 22 23:22:26.037 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48752 Sep 22 23:22:26.037 INFO [2] client ExtentFlush { repair_id: ReconciliationId(608), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48753 Sep 22 23:22:26.037 DEBG 608 Flush extent 4 with f:2 g:2
48754 Sep 22 23:22:26.037 DEBG Flush just extent 4 with f:2 and g:2
48755 Sep 22 23:22:26.038 DEBG [1] It's time to notify for 608
48756 Sep 22 23:22:26.038 INFO Completion from [1] id:608 status:true
48757 Sep 22 23:22:26.038 INFO [609/752] Repair commands completed
48758 Sep 22 23:22:26.038 INFO Pop front: ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }, state: ClientData([New, New, New]) }
48759 Sep 22 23:22:26.038 INFO Sent repair work, now wait for resp
48760 Sep 22 23:22:26.038 INFO [0] received reconcile message
48761 Sep 22 23:22:26.038 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }, state: ClientData([InProgress, New, New]) }, : downstairs
48762 Sep 22 23:22:26.038 INFO [0] client ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }
48763 Sep 22 23:22:26.038 INFO [1] received reconcile message
48764 Sep 22 23:22:26.038 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48765 Sep 22 23:22:26.038 INFO [1] client ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }
48766 Sep 22 23:22:26.038 INFO [2] received reconcile message
48767 Sep 22 23:22:26.038 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48768 Sep 22 23:22:26.038 INFO [2] client ExtentClose { repair_id: ReconciliationId(609), extent_id: 4 }
48769 Sep 22 23:22:26.038 DEBG 609 Close extent 4
48770 Sep 22 23:22:26.038 DEBG 609 Close extent 4
48771 Sep 22 23:22:26.039 DEBG 609 Close extent 4
48772 Sep 22 23:22:26.039 DEBG [2] It's time to notify for 609
48773 Sep 22 23:22:26.039 INFO Completion from [2] id:609 status:true
48774 Sep 22 23:22:26.039 INFO [610/752] Repair commands completed
48775 Sep 22 23:22:26.039 INFO Pop front: ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48776 Sep 22 23:22:26.039 INFO Sent repair work, now wait for resp
48777 Sep 22 23:22:26.039 INFO [0] received reconcile message
48778 Sep 22 23:22:26.039 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48779 Sep 22 23:22:26.039 INFO [0] client ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48780 Sep 22 23:22:26.039 INFO [0] Sending repair request ReconciliationId(610)
48781 Sep 22 23:22:26.039 INFO [1] received reconcile message
48782 Sep 22 23:22:26.039 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48783 Sep 22 23:22:26.039 INFO [1] client ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48784 Sep 22 23:22:26.039 INFO [1] No action required ReconciliationId(610)
48785 Sep 22 23:22:26.039 INFO [2] received reconcile message
48786 Sep 22 23:22:26.039 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48787 Sep 22 23:22:26.039 INFO [2] client ExtentRepair { repair_id: ReconciliationId(610), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48788 Sep 22 23:22:26.039 INFO [2] No action required ReconciliationId(610)
48789 Sep 22 23:22:26.040 DEBG 610 Repair extent 4 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48790 Sep 22 23:22:26.040 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/004.copy"
48791 Sep 22 23:22:26.083 DEBG up_ds_listen checked 1 jobs, back to waiting
48792 Sep 22 23:22:26.086 DEBG Flush :1088 extent_limit None deps:[JobId(1087), JobId(1086)] res:true f:33 g:1
48793 Sep 22 23:22:26.086 WARN returning error on read!
48794 Sep 22 23:22:26.086 DEBG Read :1089 deps:[JobId(1088)] res:false
48795 Sep 22 23:22:26.086 INFO [lossy] skipping 1091
48796 Sep 22 23:22:26.092 DEBG Read :1089 deps:[JobId(1088)] res:true
48797 Sep 22 23:22:26.103 INFO accepted connection, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48798 Sep 22 23:22:26.104 TRCE incoming request, uri: /extent/4/files, method: GET, req_id: 83604101-67d1-4d48-a6d0-00ccd22ed211, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48799 Sep 22 23:22:26.104 INFO request completed, latency_us: 216, response_code: 200, uri: /extent/4/files, method: GET, req_id: 83604101-67d1-4d48-a6d0-00ccd22ed211, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48800 Sep 22 23:22:26.104 INFO eid:4 Found repair files: ["004", "004.db"]
48801 Sep 22 23:22:26.104 TRCE incoming request, uri: /newextent/4/data, method: GET, req_id: 2f909e4b-ee48-4827-8390-535673e5c372, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48802 Sep 22 23:22:26.105 INFO request completed, latency_us: 335, response_code: 200, uri: /newextent/4/data, method: GET, req_id: 2f909e4b-ee48-4827-8390-535673e5c372, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48803 Sep 22 23:22:26.110 TRCE incoming request, uri: /newextent/4/db, method: GET, req_id: 9b352f7c-38d0-4f8a-9e58-959ad055ca33, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48804 Sep 22 23:22:26.110 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/4/db, method: GET, req_id: 9b352f7c-38d0-4f8a-9e58-959ad055ca33, remote_addr: 127.0.0.1:33331, local_addr: 127.0.0.1:52864, task: repair
48805 Sep 22 23:22:26.111 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/004.copy" to "/tmp/downstairs-zrMnlo6G/00/000/004.replace"
48806 Sep 22 23:22:26.111 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48807 Sep 22 23:22:26.112 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/004.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48808 Sep 22 23:22:26.112 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/004"
48809 Sep 22 23:22:26.112 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/004.db"
48810 Sep 22 23:22:26.112 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48811 Sep 22 23:22:26.112 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/004.replace" to "/tmp/downstairs-zrMnlo6G/00/000/004.completed"
48812 Sep 22 23:22:26.112 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48813 Sep 22 23:22:26.112 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48814 Sep 22 23:22:26.113 DEBG [0] It's time to notify for 610
48815 Sep 22 23:22:26.113 INFO Completion from [0] id:610 status:true
48816 Sep 22 23:22:26.113 INFO [611/752] Repair commands completed
48817 Sep 22 23:22:26.113 INFO Pop front: ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }, state: ClientData([New, New, New]) }
48818 Sep 22 23:22:26.113 INFO Sent repair work, now wait for resp
48819 Sep 22 23:22:26.113 INFO [0] received reconcile message
48820 Sep 22 23:22:26.113 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }, state: ClientData([InProgress, New, New]) }, : downstairs
48821 Sep 22 23:22:26.113 INFO [0] client ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }
48822 Sep 22 23:22:26.113 INFO [1] received reconcile message
48823 Sep 22 23:22:26.113 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48824 Sep 22 23:22:26.113 INFO [1] client ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }
48825 Sep 22 23:22:26.113 INFO [2] received reconcile message
48826 Sep 22 23:22:26.113 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48827 Sep 22 23:22:26.113 INFO [2] client ExtentReopen { repair_id: ReconciliationId(611), extent_id: 4 }
48828 Sep 22 23:22:26.113 DEBG 611 Reopen extent 4
48829 Sep 22 23:22:26.114 DEBG IO Flush 1092 has deps [JobId(1091), JobId(1090)]
48830 Sep 22 23:22:26.114 DEBG 611 Reopen extent 4
48831 Sep 22 23:22:26.114 DEBG 611 Reopen extent 4
48832 Sep 22 23:22:26.115 DEBG [2] It's time to notify for 611
48833 Sep 22 23:22:26.115 INFO Completion from [2] id:611 status:true
48834 Sep 22 23:22:26.115 INFO [612/752] Repair commands completed
48835 Sep 22 23:22:26.115 INFO Pop front: ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48836 Sep 22 23:22:26.115 INFO Sent repair work, now wait for resp
48837 Sep 22 23:22:26.115 INFO [0] received reconcile message
48838 Sep 22 23:22:26.115 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48839 Sep 22 23:22:26.115 INFO [0] client ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48840 Sep 22 23:22:26.115 INFO [1] received reconcile message
48841 Sep 22 23:22:26.115 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48842 Sep 22 23:22:26.115 INFO [1] client ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48843 Sep 22 23:22:26.115 INFO [2] received reconcile message
48844 Sep 22 23:22:26.115 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48845 Sep 22 23:22:26.115 INFO [2] client ExtentFlush { repair_id: ReconciliationId(612), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48846 Sep 22 23:22:26.116 DEBG 612 Flush extent 70 with f:2 g:2
48847 Sep 22 23:22:26.116 DEBG Flush just extent 70 with f:2 and g:2
48848 Sep 22 23:22:26.116 DEBG [1] It's time to notify for 612
48849 Sep 22 23:22:26.116 INFO Completion from [1] id:612 status:true
48850 Sep 22 23:22:26.116 INFO [613/752] Repair commands completed
48851 Sep 22 23:22:26.116 INFO Pop front: ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }, state: ClientData([New, New, New]) }
48852 Sep 22 23:22:26.116 INFO Sent repair work, now wait for resp
48853 Sep 22 23:22:26.116 INFO [0] received reconcile message
48854 Sep 22 23:22:26.116 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }, state: ClientData([InProgress, New, New]) }, : downstairs
48855 Sep 22 23:22:26.116 INFO [0] client ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }
48856 Sep 22 23:22:26.116 INFO [1] received reconcile message
48857 Sep 22 23:22:26.116 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48858 Sep 22 23:22:26.116 INFO [1] client ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }
48859 Sep 22 23:22:26.116 INFO [2] received reconcile message
48860 Sep 22 23:22:26.116 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48861 Sep 22 23:22:26.116 INFO [2] client ExtentClose { repair_id: ReconciliationId(613), extent_id: 70 }
48862 Sep 22 23:22:26.116 DEBG 613 Close extent 70
48863 Sep 22 23:22:26.116 DEBG 613 Close extent 70
48864 Sep 22 23:22:26.117 DEBG 613 Close extent 70
48865 Sep 22 23:22:26.117 DEBG [2] It's time to notify for 613
48866 Sep 22 23:22:26.117 INFO Completion from [2] id:613 status:true
48867 Sep 22 23:22:26.117 INFO [614/752] Repair commands completed
48868 Sep 22 23:22:26.117 INFO Pop front: ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48869 Sep 22 23:22:26.117 INFO Sent repair work, now wait for resp
48870 Sep 22 23:22:26.117 INFO [0] received reconcile message
48871 Sep 22 23:22:26.117 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48872 Sep 22 23:22:26.117 INFO [0] client ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48873 Sep 22 23:22:26.117 INFO [0] Sending repair request ReconciliationId(614)
48874 Sep 22 23:22:26.117 INFO [1] received reconcile message
48875 Sep 22 23:22:26.117 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48876 Sep 22 23:22:26.117 INFO [1] client ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48877 Sep 22 23:22:26.117 INFO [1] No action required ReconciliationId(614)
48878 Sep 22 23:22:26.117 INFO [2] received reconcile message
48879 Sep 22 23:22:26.117 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48880 Sep 22 23:22:26.118 INFO [2] client ExtentRepair { repair_id: ReconciliationId(614), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48881 Sep 22 23:22:26.118 INFO [2] No action required ReconciliationId(614)
48882 Sep 22 23:22:26.118 DEBG 614 Repair extent 70 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48883 Sep 22 23:22:26.118 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/046.copy"
48884 Sep 22 23:22:26.120 DEBG Read :1091 deps:[JobId(1090)] res:true
48885 Sep 22 23:22:26.182 INFO accepted connection, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48886 Sep 22 23:22:26.183 TRCE incoming request, uri: /extent/70/files, method: GET, req_id: 67fb5f22-d2c0-4ba6-855a-2158f293324d, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48887 Sep 22 23:22:26.183 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/70/files, method: GET, req_id: 67fb5f22-d2c0-4ba6-855a-2158f293324d, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48888 Sep 22 23:22:26.183 INFO eid:70 Found repair files: ["046", "046.db"]
48889 Sep 22 23:22:26.183 TRCE incoming request, uri: /newextent/70/data, method: GET, req_id: 6118aca9-01fe-47f6-8df9-e8e653b5a34c, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48890 Sep 22 23:22:26.184 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/70/data, method: GET, req_id: 6118aca9-01fe-47f6-8df9-e8e653b5a34c, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48891 Sep 22 23:22:26.188 TRCE incoming request, uri: /newextent/70/db, method: GET, req_id: 77a0f105-2491-4bc4-96ba-f33bb7212483, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48892 Sep 22 23:22:26.189 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/70/db, method: GET, req_id: 77a0f105-2491-4bc4-96ba-f33bb7212483, remote_addr: 127.0.0.1:36676, local_addr: 127.0.0.1:52864, task: repair
48893 Sep 22 23:22:26.190 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/046.copy" to "/tmp/downstairs-zrMnlo6G/00/000/046.replace"
48894 Sep 22 23:22:26.190 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48895 Sep 22 23:22:26.191 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/046.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48896 Sep 22 23:22:26.191 DEBG IO Read 1093 has deps [JobId(1092)]
48897 Sep 22 23:22:26.191 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/046"
48898 Sep 22 23:22:26.191 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/046.db"
48899 Sep 22 23:22:26.191 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48900 Sep 22 23:22:26.191 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/046.replace" to "/tmp/downstairs-zrMnlo6G/00/000/046.completed"
48901 Sep 22 23:22:26.191 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48902 Sep 22 23:22:26.191 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48903 Sep 22 23:22:26.191 DEBG [0] It's time to notify for 614
48904 Sep 22 23:22:26.192 INFO Completion from [0] id:614 status:true
48905 Sep 22 23:22:26.192 INFO [615/752] Repair commands completed
48906 Sep 22 23:22:26.192 INFO Pop front: ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }, state: ClientData([New, New, New]) }
48907 Sep 22 23:22:26.192 INFO Sent repair work, now wait for resp
48908 Sep 22 23:22:26.192 INFO [0] received reconcile message
48909 Sep 22 23:22:26.192 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }, state: ClientData([InProgress, New, New]) }, : downstairs
48910 Sep 22 23:22:26.192 INFO [0] client ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }
48911 Sep 22 23:22:26.192 INFO [1] received reconcile message
48912 Sep 22 23:22:26.192 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48913 Sep 22 23:22:26.192 INFO [1] client ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }
48914 Sep 22 23:22:26.192 INFO [2] received reconcile message
48915 Sep 22 23:22:26.192 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48916 Sep 22 23:22:26.192 INFO [2] client ExtentReopen { repair_id: ReconciliationId(615), extent_id: 70 }
48917 Sep 22 23:22:26.192 DEBG 615 Reopen extent 70
48918 Sep 22 23:22:26.193 DEBG 615 Reopen extent 70
48919 Sep 22 23:22:26.193 DEBG 615 Reopen extent 70
48920 Sep 22 23:22:26.194 DEBG [2] It's time to notify for 615
48921 Sep 22 23:22:26.194 INFO Completion from [2] id:615 status:true
48922 Sep 22 23:22:26.194 INFO [616/752] Repair commands completed
48923 Sep 22 23:22:26.194 INFO Pop front: ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48924 Sep 22 23:22:26.194 INFO Sent repair work, now wait for resp
48925 Sep 22 23:22:26.194 INFO [0] received reconcile message
48926 Sep 22 23:22:26.194 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48927 Sep 22 23:22:26.194 INFO [0] client ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48928 Sep 22 23:22:26.194 INFO [1] received reconcile message
48929 Sep 22 23:22:26.194 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48930 Sep 22 23:22:26.194 INFO [1] client ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48931 Sep 22 23:22:26.194 INFO [2] received reconcile message
48932 Sep 22 23:22:26.194 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48933 Sep 22 23:22:26.194 INFO [2] client ExtentFlush { repair_id: ReconciliationId(616), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48934 Sep 22 23:22:26.194 DEBG 616 Flush extent 76 with f:2 g:2
48935 Sep 22 23:22:26.194 DEBG Flush just extent 76 with f:2 and g:2
48936 Sep 22 23:22:26.194 DEBG [1] It's time to notify for 616
48937 Sep 22 23:22:26.194 INFO Completion from [1] id:616 status:true
48938 Sep 22 23:22:26.194 INFO [617/752] Repair commands completed
48939 Sep 22 23:22:26.194 INFO Pop front: ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }, state: ClientData([New, New, New]) }
48940 Sep 22 23:22:26.194 INFO Sent repair work, now wait for resp
48941 Sep 22 23:22:26.194 INFO [0] received reconcile message
48942 Sep 22 23:22:26.194 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }, state: ClientData([InProgress, New, New]) }, : downstairs
48943 Sep 22 23:22:26.194 INFO [0] client ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }
48944 Sep 22 23:22:26.194 INFO [1] received reconcile message
48945 Sep 22 23:22:26.195 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48946 Sep 22 23:22:26.195 INFO [1] client ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }
48947 Sep 22 23:22:26.195 INFO [2] received reconcile message
48948 Sep 22 23:22:26.195 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48949 Sep 22 23:22:26.195 INFO [2] client ExtentClose { repair_id: ReconciliationId(617), extent_id: 76 }
48950 Sep 22 23:22:26.195 DEBG 617 Close extent 76
48951 Sep 22 23:22:26.195 DEBG 617 Close extent 76
48952 Sep 22 23:22:26.195 DEBG 617 Close extent 76
48953 Sep 22 23:22:26.196 DEBG [2] It's time to notify for 617
48954 Sep 22 23:22:26.196 INFO Completion from [2] id:617 status:true
48955 Sep 22 23:22:26.196 INFO [618/752] Repair commands completed
48956 Sep 22 23:22:26.196 INFO Pop front: ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48957 Sep 22 23:22:26.196 INFO Sent repair work, now wait for resp
48958 Sep 22 23:22:26.196 INFO [0] received reconcile message
48959 Sep 22 23:22:26.196 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48960 Sep 22 23:22:26.196 INFO [0] client ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48961 Sep 22 23:22:26.196 INFO [0] Sending repair request ReconciliationId(618)
48962 Sep 22 23:22:26.196 INFO [1] received reconcile message
48963 Sep 22 23:22:26.196 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48964 Sep 22 23:22:26.196 INFO [1] client ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48965 Sep 22 23:22:26.196 INFO [1] No action required ReconciliationId(618)
48966 Sep 22 23:22:26.196 INFO [2] received reconcile message
48967 Sep 22 23:22:26.196 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48968 Sep 22 23:22:26.196 INFO [2] client ExtentRepair { repair_id: ReconciliationId(618), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
48969 Sep 22 23:22:26.196 INFO [2] No action required ReconciliationId(618)
48970 Sep 22 23:22:26.196 DEBG 618 Repair extent 76 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
48971 Sep 22 23:22:26.196 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/04C.copy"
48972 Sep 22 23:22:26.262 INFO accepted connection, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48973 Sep 22 23:22:26.262 TRCE incoming request, uri: /extent/76/files, method: GET, req_id: d18f3259-1576-4bf3-9c8e-af09bb3a911d, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48974 Sep 22 23:22:26.262 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/76/files, method: GET, req_id: d18f3259-1576-4bf3-9c8e-af09bb3a911d, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48975 Sep 22 23:22:26.263 INFO eid:76 Found repair files: ["04C", "04C.db"]
48976 Sep 22 23:22:26.263 TRCE incoming request, uri: /newextent/76/data, method: GET, req_id: f99fadc7-eff6-4028-983c-e2503292abb5, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48977 Sep 22 23:22:26.263 INFO request completed, latency_us: 321, response_code: 200, uri: /newextent/76/data, method: GET, req_id: f99fadc7-eff6-4028-983c-e2503292abb5, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48978 Sep 22 23:22:26.268 TRCE incoming request, uri: /newextent/76/db, method: GET, req_id: 6a537014-bbea-46e8-b972-43c3a9367c42, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48979 Sep 22 23:22:26.268 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/76/db, method: GET, req_id: 6a537014-bbea-46e8-b972-43c3a9367c42, remote_addr: 127.0.0.1:40842, local_addr: 127.0.0.1:52864, task: repair
48980 Sep 22 23:22:26.269 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/04C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/04C.replace"
48981 Sep 22 23:22:26.269 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48982 Sep 22 23:22:26.270 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/04C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
48983 Sep 22 23:22:26.271 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04C"
48984 Sep 22 23:22:26.271 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/04C.db"
48985 Sep 22 23:22:26.271 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48986 Sep 22 23:22:26.271 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/04C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/04C.completed"
48987 Sep 22 23:22:26.271 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48988 Sep 22 23:22:26.271 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
48989 Sep 22 23:22:26.271 DEBG [0] It's time to notify for 618
48990 Sep 22 23:22:26.271 INFO Completion from [0] id:618 status:true
48991 Sep 22 23:22:26.271 INFO [619/752] Repair commands completed
48992 Sep 22 23:22:26.271 INFO Pop front: ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }, state: ClientData([New, New, New]) }
48993 Sep 22 23:22:26.271 INFO Sent repair work, now wait for resp
48994 Sep 22 23:22:26.271 INFO [0] received reconcile message
48995 Sep 22 23:22:26.271 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }, state: ClientData([InProgress, New, New]) }, : downstairs
48996 Sep 22 23:22:26.271 INFO [0] client ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }
48997 Sep 22 23:22:26.271 INFO [1] received reconcile message
48998 Sep 22 23:22:26.271 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48999 Sep 22 23:22:26.271 INFO [1] client ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }
49000 Sep 22 23:22:26.271 INFO [2] received reconcile message
49001 Sep 22 23:22:26.271 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49002 Sep 22 23:22:26.271 INFO [2] client ExtentReopen { repair_id: ReconciliationId(619), extent_id: 76 }
49003 Sep 22 23:22:26.272 DEBG 619 Reopen extent 76
49004 Sep 22 23:22:26.272 DEBG 619 Reopen extent 76
49005 Sep 22 23:22:26.273 DEBG 619 Reopen extent 76
49006 Sep 22 23:22:26.273 DEBG [2] It's time to notify for 619
49007 Sep 22 23:22:26.273 INFO Completion from [2] id:619 status:true
49008 Sep 22 23:22:26.273 INFO [620/752] Repair commands completed
49009 Sep 22 23:22:26.273 INFO Pop front: ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49010 Sep 22 23:22:26.273 INFO Sent repair work, now wait for resp
49011 Sep 22 23:22:26.273 INFO [0] received reconcile message
49012 Sep 22 23:22:26.273 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49013 Sep 22 23:22:26.273 INFO [0] client ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49014 Sep 22 23:22:26.273 INFO [1] received reconcile message
49015 Sep 22 23:22:26.273 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49016 Sep 22 23:22:26.273 INFO [1] client ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49017 Sep 22 23:22:26.273 INFO [2] received reconcile message
49018 Sep 22 23:22:26.273 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49019 Sep 22 23:22:26.274 INFO [2] client ExtentFlush { repair_id: ReconciliationId(620), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49020 Sep 22 23:22:26.274 DEBG 620 Flush extent 113 with f:2 g:2
49021 Sep 22 23:22:26.274 DEBG Flush just extent 113 with f:2 and g:2
49022 Sep 22 23:22:26.274 DEBG [1] It's time to notify for 620
49023 Sep 22 23:22:26.274 INFO Completion from [1] id:620 status:true
49024 Sep 22 23:22:26.274 INFO [621/752] Repair commands completed
49025 Sep 22 23:22:26.274 INFO Pop front: ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }, state: ClientData([New, New, New]) }
49026 Sep 22 23:22:26.274 INFO Sent repair work, now wait for resp
49027 Sep 22 23:22:26.274 INFO [0] received reconcile message
49028 Sep 22 23:22:26.274 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }, state: ClientData([InProgress, New, New]) }, : downstairs
49029 Sep 22 23:22:26.274 INFO [0] client ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }
49030 Sep 22 23:22:26.274 INFO [1] received reconcile message
49031 Sep 22 23:22:26.274 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49032 Sep 22 23:22:26.274 INFO [1] client ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }
49033 Sep 22 23:22:26.274 INFO [2] received reconcile message
49034 Sep 22 23:22:26.274 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49035 Sep 22 23:22:26.274 INFO [2] client ExtentClose { repair_id: ReconciliationId(621), extent_id: 113 }
49036 Sep 22 23:22:26.274 DEBG 621 Close extent 113
49037 Sep 22 23:22:26.274 DEBG 621 Close extent 113
49038 Sep 22 23:22:26.275 DEBG 621 Close extent 113
49039 Sep 22 23:22:26.275 DEBG [2] It's time to notify for 621
49040 Sep 22 23:22:26.275 INFO Completion from [2] id:621 status:true
49041 Sep 22 23:22:26.275 INFO [622/752] Repair commands completed
49042 Sep 22 23:22:26.275 INFO Pop front: ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49043 Sep 22 23:22:26.275 INFO Sent repair work, now wait for resp
49044 Sep 22 23:22:26.275 INFO [0] received reconcile message
49045 Sep 22 23:22:26.275 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49046 Sep 22 23:22:26.275 INFO [0] client ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49047 Sep 22 23:22:26.275 INFO [0] Sending repair request ReconciliationId(622)
49048 Sep 22 23:22:26.275 INFO [1] received reconcile message
49049 Sep 22 23:22:26.275 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49050 Sep 22 23:22:26.275 INFO [1] client ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49051 Sep 22 23:22:26.275 INFO [1] No action required ReconciliationId(622)
49052 Sep 22 23:22:26.276 INFO [2] received reconcile message
49053 Sep 22 23:22:26.276 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49054 Sep 22 23:22:26.276 INFO [2] client ExtentRepair { repair_id: ReconciliationId(622), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49055 Sep 22 23:22:26.276 INFO [2] No action required ReconciliationId(622)
49056 Sep 22 23:22:26.276 DEBG 622 Repair extent 113 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49057 Sep 22 23:22:26.276 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/071.copy"
49058 Sep 22 23:22:26.340 INFO accepted connection, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49059 Sep 22 23:22:26.341 TRCE incoming request, uri: /extent/113/files, method: GET, req_id: cdbef51d-a593-4262-8312-e01cab453433, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49060 Sep 22 23:22:26.341 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/113/files, method: GET, req_id: cdbef51d-a593-4262-8312-e01cab453433, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49061 Sep 22 23:22:26.341 INFO eid:113 Found repair files: ["071", "071.db"]
49062 Sep 22 23:22:26.341 TRCE incoming request, uri: /newextent/113/data, method: GET, req_id: aeb9ca80-ef5f-45e8-baf6-77a32111f503, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49063 Sep 22 23:22:26.342 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/113/data, method: GET, req_id: aeb9ca80-ef5f-45e8-baf6-77a32111f503, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49064 Sep 22 23:22:26.346 TRCE incoming request, uri: /newextent/113/db, method: GET, req_id: fe517cf5-bd5c-4a70-ad5e-c7994609d22b, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49065 Sep 22 23:22:26.347 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/113/db, method: GET, req_id: fe517cf5-bd5c-4a70-ad5e-c7994609d22b, remote_addr: 127.0.0.1:49516, local_addr: 127.0.0.1:52864, task: repair
49066 Sep 22 23:22:26.348 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/071.copy" to "/tmp/downstairs-zrMnlo6G/00/000/071.replace"
49067 Sep 22 23:22:26.348 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49068 Sep 22 23:22:26.349 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/071.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49069 Sep 22 23:22:26.349 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/071"
49070 Sep 22 23:22:26.349 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/071.db"
49071 Sep 22 23:22:26.349 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49072 Sep 22 23:22:26.349 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/071.replace" to "/tmp/downstairs-zrMnlo6G/00/000/071.completed"
49073 Sep 22 23:22:26.349 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49074 Sep 22 23:22:26.349 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49075 Sep 22 23:22:26.349 DEBG [0] It's time to notify for 622
49076 Sep 22 23:22:26.349 INFO Completion from [0] id:622 status:true
49077 Sep 22 23:22:26.349 INFO [623/752] Repair commands completed
49078 Sep 22 23:22:26.350 INFO Pop front: ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }, state: ClientData([New, New, New]) }
49079 Sep 22 23:22:26.350 INFO Sent repair work, now wait for resp
49080 Sep 22 23:22:26.350 INFO [0] received reconcile message
49081 Sep 22 23:22:26.350 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }, state: ClientData([InProgress, New, New]) }, : downstairs
49082 Sep 22 23:22:26.350 INFO [0] client ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }
49083 Sep 22 23:22:26.350 INFO [1] received reconcile message
49084 Sep 22 23:22:26.350 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49085 Sep 22 23:22:26.350 INFO [1] client ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }
49086 Sep 22 23:22:26.350 INFO [2] received reconcile message
49087 Sep 22 23:22:26.350 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49088 Sep 22 23:22:26.350 INFO [2] client ExtentReopen { repair_id: ReconciliationId(623), extent_id: 113 }
49089 Sep 22 23:22:26.350 DEBG 623 Reopen extent 113
49090 Sep 22 23:22:26.350 DEBG 623 Reopen extent 113
49091 Sep 22 23:22:26.351 DEBG 623 Reopen extent 113
49092 Sep 22 23:22:26.352 DEBG [2] It's time to notify for 623
49093 Sep 22 23:22:26.352 INFO Completion from [2] id:623 status:true
49094 Sep 22 23:22:26.352 INFO [624/752] Repair commands completed
49095 Sep 22 23:22:26.352 INFO Pop front: ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49096 Sep 22 23:22:26.352 INFO Sent repair work, now wait for resp
49097 Sep 22 23:22:26.352 INFO [0] received reconcile message
49098 Sep 22 23:22:26.352 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49099 Sep 22 23:22:26.352 INFO [0] client ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49100 Sep 22 23:22:26.352 INFO [1] received reconcile message
49101 Sep 22 23:22:26.352 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49102 Sep 22 23:22:26.352 INFO [1] client ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49103 Sep 22 23:22:26.352 INFO [2] received reconcile message
49104 Sep 22 23:22:26.352 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49105 Sep 22 23:22:26.352 INFO [2] client ExtentFlush { repair_id: ReconciliationId(624), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49106 Sep 22 23:22:26.352 DEBG 624 Flush extent 108 with f:2 g:2
49107 Sep 22 23:22:26.352 DEBG Flush just extent 108 with f:2 and g:2
49108 Sep 22 23:22:26.352 DEBG [1] It's time to notify for 624
49109 Sep 22 23:22:26.352 INFO Completion from [1] id:624 status:true
49110 Sep 22 23:22:26.352 INFO [625/752] Repair commands completed
49111 Sep 22 23:22:26.352 INFO Pop front: ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }, state: ClientData([New, New, New]) }
49112 Sep 22 23:22:26.352 INFO Sent repair work, now wait for resp
49113 Sep 22 23:22:26.352 INFO [0] received reconcile message
49114 Sep 22 23:22:26.352 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }, state: ClientData([InProgress, New, New]) }, : downstairs
49115 Sep 22 23:22:26.352 INFO [0] client ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }
49116 Sep 22 23:22:26.352 INFO [1] received reconcile message
49117 Sep 22 23:22:26.352 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49118 Sep 22 23:22:26.352 INFO [1] client ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }
49119 Sep 22 23:22:26.352 INFO [2] received reconcile message
49120 Sep 22 23:22:26.352 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49121 Sep 22 23:22:26.353 INFO [2] client ExtentClose { repair_id: ReconciliationId(625), extent_id: 108 }
49122 Sep 22 23:22:26.353 DEBG 625 Close extent 108
49123 Sep 22 23:22:26.353 DEBG 625 Close extent 108
49124 Sep 22 23:22:26.353 DEBG 625 Close extent 108
49125 Sep 22 23:22:26.354 DEBG [2] It's time to notify for 625
49126 Sep 22 23:22:26.354 INFO Completion from [2] id:625 status:true
49127 Sep 22 23:22:26.354 INFO [626/752] Repair commands completed
49128 Sep 22 23:22:26.354 INFO Pop front: ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49129 Sep 22 23:22:26.354 INFO Sent repair work, now wait for resp
49130 Sep 22 23:22:26.354 INFO [0] received reconcile message
49131 Sep 22 23:22:26.354 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49132 Sep 22 23:22:26.354 INFO [0] client ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49133 Sep 22 23:22:26.354 INFO [0] Sending repair request ReconciliationId(626)
49134 Sep 22 23:22:26.354 INFO [1] received reconcile message
49135 Sep 22 23:22:26.354 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49136 Sep 22 23:22:26.354 INFO [1] client ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49137 Sep 22 23:22:26.354 INFO [1] No action required ReconciliationId(626)
49138 Sep 22 23:22:26.354 INFO [2] received reconcile message
49139 Sep 22 23:22:26.354 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49140 Sep 22 23:22:26.354 INFO [2] client ExtentRepair { repair_id: ReconciliationId(626), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49141 Sep 22 23:22:26.354 INFO [2] No action required ReconciliationId(626)
49142 Sep 22 23:22:26.354 DEBG 626 Repair extent 108 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49143 Sep 22 23:22:26.354 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/06C.copy"
49144 Sep 22 23:22:26.418 INFO accepted connection, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49145 Sep 22 23:22:26.418 TRCE incoming request, uri: /extent/108/files, method: GET, req_id: b48acfff-076f-44d5-af5c-31280e28074a, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49146 Sep 22 23:22:26.418 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/108/files, method: GET, req_id: b48acfff-076f-44d5-af5c-31280e28074a, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49147 Sep 22 23:22:26.419 INFO eid:108 Found repair files: ["06C", "06C.db"]
49148 Sep 22 23:22:26.419 TRCE incoming request, uri: /newextent/108/data, method: GET, req_id: eea66984-3fd3-454c-acb9-aab21b779a4f, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49149 Sep 22 23:22:26.419 INFO request completed, latency_us: 320, response_code: 200, uri: /newextent/108/data, method: GET, req_id: eea66984-3fd3-454c-acb9-aab21b779a4f, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49150 Sep 22 23:22:26.424 TRCE incoming request, uri: /newextent/108/db, method: GET, req_id: f55f3b33-285f-4e16-bc8b-1ff8f2684ed4, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49151 Sep 22 23:22:26.424 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/108/db, method: GET, req_id: f55f3b33-285f-4e16-bc8b-1ff8f2684ed4, remote_addr: 127.0.0.1:40951, local_addr: 127.0.0.1:52864, task: repair
49152 Sep 22 23:22:26.425 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/06C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/06C.replace"
49153 Sep 22 23:22:26.425 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49154 Sep 22 23:22:26.426 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/06C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49155 Sep 22 23:22:26.426 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06C"
49156 Sep 22 23:22:26.427 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/06C.db"
49157 Sep 22 23:22:26.427 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49158 Sep 22 23:22:26.427 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/06C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/06C.completed"
49159 Sep 22 23:22:26.427 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49160 Sep 22 23:22:26.427 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49161 Sep 22 23:22:26.427 DEBG [0] It's time to notify for 626
49162 Sep 22 23:22:26.427 INFO Completion from [0] id:626 status:true
49163 Sep 22 23:22:26.427 INFO [627/752] Repair commands completed
49164 Sep 22 23:22:26.427 INFO Pop front: ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }, state: ClientData([New, New, New]) }
49165 Sep 22 23:22:26.427 INFO Sent repair work, now wait for resp
49166 Sep 22 23:22:26.427 INFO [0] received reconcile message
49167 Sep 22 23:22:26.427 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }, state: ClientData([InProgress, New, New]) }, : downstairs
49168 Sep 22 23:22:26.427 INFO [0] client ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }
49169 Sep 22 23:22:26.427 INFO [1] received reconcile message
49170 Sep 22 23:22:26.427 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49171 Sep 22 23:22:26.427 INFO [1] client ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }
49172 Sep 22 23:22:26.427 INFO [2] received reconcile message
49173 Sep 22 23:22:26.427 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49174 Sep 22 23:22:26.427 INFO [2] client ExtentReopen { repair_id: ReconciliationId(627), extent_id: 108 }
49175 Sep 22 23:22:26.427 DEBG 627 Reopen extent 108
49176 Sep 22 23:22:26.428 DEBG 627 Reopen extent 108
49177 Sep 22 23:22:26.428 DEBG 627 Reopen extent 108
49178 Sep 22 23:22:26.429 DEBG [2] It's time to notify for 627
49179 Sep 22 23:22:26.429 INFO Completion from [2] id:627 status:true
49180 Sep 22 23:22:26.429 INFO [628/752] Repair commands completed
49181 Sep 22 23:22:26.429 INFO Pop front: ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49182 Sep 22 23:22:26.429 INFO Sent repair work, now wait for resp
49183 Sep 22 23:22:26.429 INFO [0] received reconcile message
49184 Sep 22 23:22:26.429 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49185 Sep 22 23:22:26.429 INFO [0] client ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49186 Sep 22 23:22:26.429 INFO [1] received reconcile message
49187 Sep 22 23:22:26.429 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49188 Sep 22 23:22:26.429 INFO [1] client ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49189 Sep 22 23:22:26.429 INFO [2] received reconcile message
49190 Sep 22 23:22:26.429 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49191 Sep 22 23:22:26.429 INFO [2] client ExtentFlush { repair_id: ReconciliationId(628), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49192 Sep 22 23:22:26.429 DEBG 628 Flush extent 42 with f:2 g:2
49193 Sep 22 23:22:26.430 DEBG Flush just extent 42 with f:2 and g:2
49194 Sep 22 23:22:26.430 DEBG [1] It's time to notify for 628
49195 Sep 22 23:22:26.430 INFO Completion from [1] id:628 status:true
49196 Sep 22 23:22:26.430 INFO [629/752] Repair commands completed
49197 Sep 22 23:22:26.430 INFO Pop front: ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }, state: ClientData([New, New, New]) }
49198 Sep 22 23:22:26.430 INFO Sent repair work, now wait for resp
49199 Sep 22 23:22:26.430 INFO [0] received reconcile message
49200 Sep 22 23:22:26.430 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }, state: ClientData([InProgress, New, New]) }, : downstairs
49201 Sep 22 23:22:26.430 INFO [0] client ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }
49202 Sep 22 23:22:26.430 INFO [1] received reconcile message
49203 Sep 22 23:22:26.430 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49204 Sep 22 23:22:26.430 INFO [1] client ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }
49205 Sep 22 23:22:26.430 INFO [2] received reconcile message
49206 Sep 22 23:22:26.430 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49207 Sep 22 23:22:26.430 INFO [2] client ExtentClose { repair_id: ReconciliationId(629), extent_id: 42 }
49208 Sep 22 23:22:26.430 DEBG 629 Close extent 42
49209 Sep 22 23:22:26.430 DEBG 629 Close extent 42
49210 Sep 22 23:22:26.431 DEBG 629 Close extent 42
49211 Sep 22 23:22:26.431 DEBG [2] It's time to notify for 629
49212 Sep 22 23:22:26.431 INFO Completion from [2] id:629 status:true
49213 Sep 22 23:22:26.431 INFO [630/752] Repair commands completed
49214 Sep 22 23:22:26.431 INFO Pop front: ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49215 Sep 22 23:22:26.431 INFO Sent repair work, now wait for resp
49216 Sep 22 23:22:26.431 INFO [0] received reconcile message
49217 Sep 22 23:22:26.431 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49218 Sep 22 23:22:26.431 INFO [0] client ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49219 Sep 22 23:22:26.431 INFO [0] Sending repair request ReconciliationId(630)
49220 Sep 22 23:22:26.431 INFO [1] received reconcile message
49221 Sep 22 23:22:26.431 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49222 Sep 22 23:22:26.431 INFO [1] client ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49223 Sep 22 23:22:26.431 INFO [1] No action required ReconciliationId(630)
49224 Sep 22 23:22:26.431 INFO [2] received reconcile message
49225 Sep 22 23:22:26.431 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49226 Sep 22 23:22:26.431 INFO [2] client ExtentRepair { repair_id: ReconciliationId(630), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49227 Sep 22 23:22:26.431 INFO [2] No action required ReconciliationId(630)
49228 Sep 22 23:22:26.432 DEBG 630 Repair extent 42 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49229 Sep 22 23:22:26.432 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/02A.copy"
49230 Sep 22 23:22:26.495 INFO accepted connection, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49231 Sep 22 23:22:26.495 TRCE incoming request, uri: /extent/42/files, method: GET, req_id: 315a7e9e-66e2-4a6e-ba44-4d7132c03715, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49232 Sep 22 23:22:26.495 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/42/files, method: GET, req_id: 315a7e9e-66e2-4a6e-ba44-4d7132c03715, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49233 Sep 22 23:22:26.496 INFO eid:42 Found repair files: ["02A", "02A.db"]
49234 Sep 22 23:22:26.496 TRCE incoming request, uri: /newextent/42/data, method: GET, req_id: 7669d07d-9705-4179-8c8e-dd39ba4c1a2e, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49235 Sep 22 23:22:26.496 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/42/data, method: GET, req_id: 7669d07d-9705-4179-8c8e-dd39ba4c1a2e, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49236 Sep 22 23:22:26.501 TRCE incoming request, uri: /newextent/42/db, method: GET, req_id: db000148-a40b-45e2-bb74-e067b1224381, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49237 Sep 22 23:22:26.501 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/42/db, method: GET, req_id: db000148-a40b-45e2-bb74-e067b1224381, remote_addr: 127.0.0.1:33983, local_addr: 127.0.0.1:52864, task: repair
49238 Sep 22 23:22:26.502 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/02A.copy" to "/tmp/downstairs-zrMnlo6G/00/000/02A.replace"
49239 Sep 22 23:22:26.502 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49240 Sep 22 23:22:26.503 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/02A.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49241 Sep 22 23:22:26.503 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02A"
49242 Sep 22 23:22:26.503 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/02A.db"
49243 Sep 22 23:22:26.503 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49244 Sep 22 23:22:26.503 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/02A.replace" to "/tmp/downstairs-zrMnlo6G/00/000/02A.completed"
49245 Sep 22 23:22:26.503 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49246 Sep 22 23:22:26.504 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49247 Sep 22 23:22:26.504 DEBG [0] It's time to notify for 630
49248 Sep 22 23:22:26.504 INFO Completion from [0] id:630 status:true
49249 Sep 22 23:22:26.504 INFO [631/752] Repair commands completed
49250 Sep 22 23:22:26.504 INFO Pop front: ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }, state: ClientData([New, New, New]) }
49251 Sep 22 23:22:26.504 INFO Sent repair work, now wait for resp
49252 Sep 22 23:22:26.504 INFO [0] received reconcile message
49253 Sep 22 23:22:26.504 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }, state: ClientData([InProgress, New, New]) }, : downstairs
49254 Sep 22 23:22:26.504 INFO [0] client ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }
49255 Sep 22 23:22:26.504 INFO [1] received reconcile message
49256 Sep 22 23:22:26.504 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49257 Sep 22 23:22:26.504 INFO [1] client ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }
49258 Sep 22 23:22:26.504 INFO [2] received reconcile message
49259 Sep 22 23:22:26.504 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49260 Sep 22 23:22:26.504 INFO [2] client ExtentReopen { repair_id: ReconciliationId(631), extent_id: 42 }
49261 Sep 22 23:22:26.504 DEBG 631 Reopen extent 42
49262 Sep 22 23:22:26.505 DEBG 631 Reopen extent 42
49263 Sep 22 23:22:26.505 DEBG 631 Reopen extent 42
49264 Sep 22 23:22:26.506 DEBG [2] It's time to notify for 631
49265 Sep 22 23:22:26.506 INFO Completion from [2] id:631 status:true
49266 Sep 22 23:22:26.506 INFO [632/752] Repair commands completed
49267 Sep 22 23:22:26.506 INFO Pop front: ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49268 Sep 22 23:22:26.506 INFO Sent repair work, now wait for resp
49269 Sep 22 23:22:26.506 INFO [0] received reconcile message
49270 Sep 22 23:22:26.506 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49271 Sep 22 23:22:26.506 INFO [0] client ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49272 Sep 22 23:22:26.506 INFO [1] received reconcile message
49273 Sep 22 23:22:26.506 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49274 Sep 22 23:22:26.506 INFO [1] client ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49275 Sep 22 23:22:26.506 INFO [2] received reconcile message
49276 Sep 22 23:22:26.506 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49277 Sep 22 23:22:26.506 INFO [2] client ExtentFlush { repair_id: ReconciliationId(632), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49278 Sep 22 23:22:26.506 DEBG 632 Flush extent 140 with f:2 g:2
49279 Sep 22 23:22:26.506 DEBG Flush just extent 140 with f:2 and g:2
49280 Sep 22 23:22:26.507 DEBG [1] It's time to notify for 632
49281 Sep 22 23:22:26.507 INFO Completion from [1] id:632 status:true
49282 Sep 22 23:22:26.507 INFO [633/752] Repair commands completed
49283 Sep 22 23:22:26.507 INFO Pop front: ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }, state: ClientData([New, New, New]) }
49284 Sep 22 23:22:26.507 INFO Sent repair work, now wait for resp
49285 Sep 22 23:22:26.507 INFO [0] received reconcile message
49286 Sep 22 23:22:26.507 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }, state: ClientData([InProgress, New, New]) }, : downstairs
49287 Sep 22 23:22:26.507 INFO [0] client ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }
49288 Sep 22 23:22:26.507 INFO [1] received reconcile message
49289 Sep 22 23:22:26.507 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49290 Sep 22 23:22:26.507 INFO [1] client ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }
49291 Sep 22 23:22:26.507 INFO [2] received reconcile message
49292 Sep 22 23:22:26.507 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49293 Sep 22 23:22:26.507 INFO [2] client ExtentClose { repair_id: ReconciliationId(633), extent_id: 140 }
49294 Sep 22 23:22:26.507 DEBG 633 Close extent 140
49295 Sep 22 23:22:26.507 DEBG 633 Close extent 140
49296 Sep 22 23:22:26.508 DEBG 633 Close extent 140
49297 Sep 22 23:22:26.508 DEBG [2] It's time to notify for 633
49298 Sep 22 23:22:26.508 INFO Completion from [2] id:633 status:true
49299 Sep 22 23:22:26.508 INFO [634/752] Repair commands completed
49300 Sep 22 23:22:26.508 INFO Pop front: ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49301 Sep 22 23:22:26.508 INFO Sent repair work, now wait for resp
49302 Sep 22 23:22:26.508 INFO [0] received reconcile message
49303 Sep 22 23:22:26.508 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49304 Sep 22 23:22:26.508 INFO [0] client ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49305 Sep 22 23:22:26.508 INFO [0] Sending repair request ReconciliationId(634)
49306 Sep 22 23:22:26.508 INFO [1] received reconcile message
49307 Sep 22 23:22:26.508 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49308 Sep 22 23:22:26.508 INFO [1] client ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49309 Sep 22 23:22:26.508 INFO [1] No action required ReconciliationId(634)
49310 Sep 22 23:22:26.508 INFO [2] received reconcile message
49311 Sep 22 23:22:26.508 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49312 Sep 22 23:22:26.508 INFO [2] client ExtentRepair { repair_id: ReconciliationId(634), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49313 Sep 22 23:22:26.508 INFO [2] No action required ReconciliationId(634)
49314 Sep 22 23:22:26.508 DEBG 634 Repair extent 140 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49315 Sep 22 23:22:26.509 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/08C.copy"
49316 Sep 22 23:22:26.524 DEBG [rc] retire 1088 clears [JobId(1087), JobId(1088)], : downstairs
49317 Sep 22 23:22:26.524 ERRO [2] job id 1089 saw error GenericError("test error")
49318 Sep 22 23:22:26.539 INFO [lossy] sleeping 1 second
49319 Sep 22 23:22:26.540 INFO [lossy] skipping 1091
49320 Sep 22 23:22:26.540 DEBG Flush :1090 extent_limit None deps:[JobId(1089), JobId(1088)] res:true f:34 g:1
49321 Sep 22 23:22:26.546 DEBG Read :1091 deps:[JobId(1090)] res:true
49322 Sep 22 23:22:26.574 INFO accepted connection, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49323 Sep 22 23:22:26.574 TRCE incoming request, uri: /extent/140/files, method: GET, req_id: dd918bdf-ab9d-47b1-a139-dcfeb07a9c06, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49324 Sep 22 23:22:26.574 INFO request completed, latency_us: 258, response_code: 200, uri: /extent/140/files, method: GET, req_id: dd918bdf-ab9d-47b1-a139-dcfeb07a9c06, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49325 Sep 22 23:22:26.575 INFO eid:140 Found repair files: ["08C", "08C.db"]
49326 Sep 22 23:22:26.575 TRCE incoming request, uri: /newextent/140/data, method: GET, req_id: 5ce8f75a-1d0d-476f-8932-ddb5ec8d0025, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49327 Sep 22 23:22:26.575 INFO request completed, latency_us: 361, response_code: 200, uri: /newextent/140/data, method: GET, req_id: 5ce8f75a-1d0d-476f-8932-ddb5ec8d0025, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49328 Sep 22 23:22:26.580 TRCE incoming request, uri: /newextent/140/db, method: GET, req_id: bd1862d3-c3f5-47c9-97e5-d2ed282438aa, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49329 Sep 22 23:22:26.580 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/140/db, method: GET, req_id: bd1862d3-c3f5-47c9-97e5-d2ed282438aa, remote_addr: 127.0.0.1:44342, local_addr: 127.0.0.1:52864, task: repair
49330 Sep 22 23:22:26.581 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/08C.copy" to "/tmp/downstairs-zrMnlo6G/00/000/08C.replace"
49331 Sep 22 23:22:26.582 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49332 Sep 22 23:22:26.583 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/08C.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49333 Sep 22 23:22:26.583 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08C"
49334 Sep 22 23:22:26.583 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08C.db"
49335 Sep 22 23:22:26.583 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49336 Sep 22 23:22:26.583 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/08C.replace" to "/tmp/downstairs-zrMnlo6G/00/000/08C.completed"
49337 Sep 22 23:22:26.583 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49338 Sep 22 23:22:26.583 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49339 Sep 22 23:22:26.583 DEBG [0] It's time to notify for 634
49340 Sep 22 23:22:26.583 INFO Completion from [0] id:634 status:true
49341 Sep 22 23:22:26.583 INFO [635/752] Repair commands completed
49342 Sep 22 23:22:26.583 INFO Pop front: ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }, state: ClientData([New, New, New]) }
49343 Sep 22 23:22:26.584 INFO Sent repair work, now wait for resp
49344 Sep 22 23:22:26.584 INFO [0] received reconcile message
49345 Sep 22 23:22:26.584 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }, state: ClientData([InProgress, New, New]) }, : downstairs
49346 Sep 22 23:22:26.584 INFO [0] client ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }
49347 Sep 22 23:22:26.584 INFO [1] received reconcile message
49348 Sep 22 23:22:26.584 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49349 Sep 22 23:22:26.584 INFO [1] client ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }
49350 Sep 22 23:22:26.584 INFO [2] received reconcile message
49351 Sep 22 23:22:26.584 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49352 Sep 22 23:22:26.584 INFO [2] client ExtentReopen { repair_id: ReconciliationId(635), extent_id: 140 }
49353 Sep 22 23:22:26.584 DEBG 635 Reopen extent 140
49354 Sep 22 23:22:26.585 DEBG 635 Reopen extent 140
49355 Sep 22 23:22:26.585 DEBG 635 Reopen extent 140
49356 Sep 22 23:22:26.586 DEBG [2] It's time to notify for 635
49357 Sep 22 23:22:26.586 INFO Completion from [2] id:635 status:true
49358 Sep 22 23:22:26.586 INFO [636/752] Repair commands completed
49359 Sep 22 23:22:26.586 INFO Pop front: ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49360 Sep 22 23:22:26.586 INFO Sent repair work, now wait for resp
49361 Sep 22 23:22:26.586 INFO [0] received reconcile message
49362 Sep 22 23:22:26.586 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49363 Sep 22 23:22:26.586 INFO [0] client ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49364 Sep 22 23:22:26.586 INFO [1] received reconcile message
49365 Sep 22 23:22:26.586 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49366 Sep 22 23:22:26.586 INFO [1] client ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49367 Sep 22 23:22:26.586 INFO [2] received reconcile message
49368 Sep 22 23:22:26.586 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49369 Sep 22 23:22:26.586 INFO [2] client ExtentFlush { repair_id: ReconciliationId(636), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49370 Sep 22 23:22:26.586 DEBG 636 Flush extent 178 with f:2 g:2
49371 Sep 22 23:22:26.586 DEBG Flush just extent 178 with f:2 and g:2
49372 Sep 22 23:22:26.586 DEBG [1] It's time to notify for 636
49373 Sep 22 23:22:26.586 INFO Completion from [1] id:636 status:true
49374 Sep 22 23:22:26.586 INFO [637/752] Repair commands completed
49375 Sep 22 23:22:26.586 INFO Pop front: ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }, state: ClientData([New, New, New]) }
49376 Sep 22 23:22:26.587 INFO Sent repair work, now wait for resp
49377 Sep 22 23:22:26.587 INFO [0] received reconcile message
49378 Sep 22 23:22:26.587 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }, state: ClientData([InProgress, New, New]) }, : downstairs
49379 Sep 22 23:22:26.587 INFO [0] client ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }
49380 Sep 22 23:22:26.587 INFO [1] received reconcile message
49381 Sep 22 23:22:26.587 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49382 Sep 22 23:22:26.587 INFO [1] client ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }
49383 Sep 22 23:22:26.587 INFO [2] received reconcile message
49384 Sep 22 23:22:26.587 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49385 Sep 22 23:22:26.587 INFO [2] client ExtentClose { repair_id: ReconciliationId(637), extent_id: 178 }
49386 Sep 22 23:22:26.587 DEBG 637 Close extent 178
49387 Sep 22 23:22:26.587 DEBG 637 Close extent 178
49388 Sep 22 23:22:26.587 DEBG 637 Close extent 178
49389 Sep 22 23:22:26.588 DEBG [2] It's time to notify for 637
49390 Sep 22 23:22:26.588 INFO Completion from [2] id:637 status:true
49391 Sep 22 23:22:26.588 INFO [638/752] Repair commands completed
49392 Sep 22 23:22:26.588 INFO Pop front: ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49393 Sep 22 23:22:26.588 INFO Sent repair work, now wait for resp
49394 Sep 22 23:22:26.588 INFO [0] received reconcile message
49395 Sep 22 23:22:26.588 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49396 Sep 22 23:22:26.588 INFO [0] client ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49397 Sep 22 23:22:26.588 INFO [0] Sending repair request ReconciliationId(638)
49398 Sep 22 23:22:26.588 INFO [1] received reconcile message
49399 Sep 22 23:22:26.588 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49400 Sep 22 23:22:26.588 INFO [1] client ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49401 Sep 22 23:22:26.588 INFO [1] No action required ReconciliationId(638)
49402 Sep 22 23:22:26.588 INFO [2] received reconcile message
49403 Sep 22 23:22:26.588 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49404 Sep 22 23:22:26.588 INFO [2] client ExtentRepair { repair_id: ReconciliationId(638), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49405 Sep 22 23:22:26.588 INFO [2] No action required ReconciliationId(638)
49406 Sep 22 23:22:26.588 DEBG 638 Repair extent 178 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49407 Sep 22 23:22:26.588 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B2.copy"
49408 Sep 22 23:22:26.652 INFO accepted connection, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49409 Sep 22 23:22:26.653 TRCE incoming request, uri: /extent/178/files, method: GET, req_id: d15cee2d-0f33-4abd-b819-7e36806b3cbf, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49410 Sep 22 23:22:26.653 INFO request completed, latency_us: 265, response_code: 200, uri: /extent/178/files, method: GET, req_id: d15cee2d-0f33-4abd-b819-7e36806b3cbf, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49411 Sep 22 23:22:26.653 INFO eid:178 Found repair files: ["0B2", "0B2.db"]
49412 Sep 22 23:22:26.654 TRCE incoming request, uri: /newextent/178/data, method: GET, req_id: fe8de25e-8017-44f2-97e8-4a1dc89e00fd, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49413 Sep 22 23:22:26.654 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/178/data, method: GET, req_id: fe8de25e-8017-44f2-97e8-4a1dc89e00fd, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49414 Sep 22 23:22:26.659 TRCE incoming request, uri: /newextent/178/db, method: GET, req_id: 754264d1-f973-40c0-a90b-ac4ba454aada, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49415 Sep 22 23:22:26.659 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/178/db, method: GET, req_id: 754264d1-f973-40c0-a90b-ac4ba454aada, remote_addr: 127.0.0.1:36395, local_addr: 127.0.0.1:52864, task: repair
49416 Sep 22 23:22:26.660 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B2.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B2.replace"
49417 Sep 22 23:22:26.660 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49418 Sep 22 23:22:26.662 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B2.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49419 Sep 22 23:22:26.662 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B2"
49420 Sep 22 23:22:26.662 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B2.db"
49421 Sep 22 23:22:26.662 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49422 Sep 22 23:22:26.662 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B2.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B2.completed"
49423 Sep 22 23:22:26.662 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49424 Sep 22 23:22:26.662 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49425 Sep 22 23:22:26.662 DEBG [0] It's time to notify for 638
49426 Sep 22 23:22:26.662 INFO Completion from [0] id:638 status:true
49427 Sep 22 23:22:26.663 INFO [639/752] Repair commands completed
49428 Sep 22 23:22:26.663 INFO Pop front: ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }, state: ClientData([New, New, New]) }
49429 Sep 22 23:22:26.663 INFO Sent repair work, now wait for resp
49430 Sep 22 23:22:26.663 INFO [0] received reconcile message
49431 Sep 22 23:22:26.663 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }, state: ClientData([InProgress, New, New]) }, : downstairs
49432 Sep 22 23:22:26.663 INFO [0] client ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }
49433 Sep 22 23:22:26.663 INFO [1] received reconcile message
49434 Sep 22 23:22:26.663 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49435 Sep 22 23:22:26.663 INFO [1] client ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }
49436 Sep 22 23:22:26.663 INFO [2] received reconcile message
49437 Sep 22 23:22:26.663 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49438 Sep 22 23:22:26.663 INFO [2] client ExtentReopen { repair_id: ReconciliationId(639), extent_id: 178 }
49439 Sep 22 23:22:26.663 DEBG 639 Reopen extent 178
49440 Sep 22 23:22:26.664 DEBG 639 Reopen extent 178
49441 Sep 22 23:22:26.664 DEBG 639 Reopen extent 178
49442 Sep 22 23:22:26.665 DEBG [2] It's time to notify for 639
49443 Sep 22 23:22:26.665 INFO Completion from [2] id:639 status:true
49444 Sep 22 23:22:26.665 INFO [640/752] Repair commands completed
49445 Sep 22 23:22:26.665 INFO Pop front: ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49446 Sep 22 23:22:26.665 INFO Sent repair work, now wait for resp
49447 Sep 22 23:22:26.665 INFO [0] received reconcile message
49448 Sep 22 23:22:26.665 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49449 Sep 22 23:22:26.665 INFO [0] client ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49450 Sep 22 23:22:26.665 INFO [1] received reconcile message
49451 Sep 22 23:22:26.665 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49452 Sep 22 23:22:26.665 INFO [1] client ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49453 Sep 22 23:22:26.665 INFO [2] received reconcile message
49454 Sep 22 23:22:26.665 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49455 Sep 22 23:22:26.665 INFO [2] client ExtentFlush { repair_id: ReconciliationId(640), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49456 Sep 22 23:22:26.665 DEBG 640 Flush extent 181 with f:2 g:2
49457 Sep 22 23:22:26.665 DEBG Flush just extent 181 with f:2 and g:2
49458 Sep 22 23:22:26.665 DEBG [1] It's time to notify for 640
49459 Sep 22 23:22:26.666 INFO Completion from [1] id:640 status:true
49460 Sep 22 23:22:26.666 INFO [641/752] Repair commands completed
49461 Sep 22 23:22:26.666 INFO Pop front: ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }, state: ClientData([New, New, New]) }
49462 Sep 22 23:22:26.666 INFO Sent repair work, now wait for resp
49463 Sep 22 23:22:26.666 INFO [0] received reconcile message
49464 Sep 22 23:22:26.666 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }, state: ClientData([InProgress, New, New]) }, : downstairs
49465 Sep 22 23:22:26.666 INFO [0] client ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }
49466 Sep 22 23:22:26.666 INFO [1] received reconcile message
49467 Sep 22 23:22:26.666 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49468 Sep 22 23:22:26.666 INFO [1] client ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }
49469 Sep 22 23:22:26.666 INFO [2] received reconcile message
49470 Sep 22 23:22:26.666 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49471 Sep 22 23:22:26.666 INFO [2] client ExtentClose { repair_id: ReconciliationId(641), extent_id: 181 }
49472 Sep 22 23:22:26.666 DEBG 641 Close extent 181
49473 Sep 22 23:22:26.666 DEBG 641 Close extent 181
49474 Sep 22 23:22:26.667 DEBG 641 Close extent 181
49475 Sep 22 23:22:26.667 DEBG [2] It's time to notify for 641
49476 Sep 22 23:22:26.667 INFO Completion from [2] id:641 status:true
49477 Sep 22 23:22:26.667 INFO [642/752] Repair commands completed
49478 Sep 22 23:22:26.667 INFO Pop front: ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49479 Sep 22 23:22:26.667 INFO Sent repair work, now wait for resp
49480 Sep 22 23:22:26.667 INFO [0] received reconcile message
49481 Sep 22 23:22:26.667 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49482 Sep 22 23:22:26.667 INFO [0] client ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49483 Sep 22 23:22:26.667 INFO [0] Sending repair request ReconciliationId(642)
49484 Sep 22 23:22:26.667 INFO [1] received reconcile message
49485 Sep 22 23:22:26.667 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49486 Sep 22 23:22:26.667 INFO [1] client ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49487 Sep 22 23:22:26.667 INFO [1] No action required ReconciliationId(642)
49488 Sep 22 23:22:26.667 INFO [2] received reconcile message
49489 Sep 22 23:22:26.667 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49490 Sep 22 23:22:26.667 INFO [2] client ExtentRepair { repair_id: ReconciliationId(642), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49491 Sep 22 23:22:26.667 INFO [2] No action required ReconciliationId(642)
49492 Sep 22 23:22:26.667 DEBG 642 Repair extent 181 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49493 Sep 22 23:22:26.668 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B5.copy"
49494 Sep 22 23:22:26.731 INFO accepted connection, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49495 Sep 22 23:22:26.732 TRCE incoming request, uri: /extent/181/files, method: GET, req_id: 3c1c3b8a-a104-4c00-8a6d-077047e4496f, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49496 Sep 22 23:22:26.732 INFO request completed, latency_us: 283, response_code: 200, uri: /extent/181/files, method: GET, req_id: 3c1c3b8a-a104-4c00-8a6d-077047e4496f, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49497 Sep 22 23:22:26.732 INFO eid:181 Found repair files: ["0B5", "0B5.db"]
49498 Sep 22 23:22:26.733 TRCE incoming request, uri: /newextent/181/data, method: GET, req_id: f0492bd6-6c63-4f33-b2cf-eddba7948333, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49499 Sep 22 23:22:26.733 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/181/data, method: GET, req_id: f0492bd6-6c63-4f33-b2cf-eddba7948333, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49500 Sep 22 23:22:26.738 TRCE incoming request, uri: /newextent/181/db, method: GET, req_id: 29afa013-65a5-4a51-a49a-cf67d1d6e345, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49501 Sep 22 23:22:26.738 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/181/db, method: GET, req_id: 29afa013-65a5-4a51-a49a-cf67d1d6e345, remote_addr: 127.0.0.1:38885, local_addr: 127.0.0.1:52864, task: repair
49502 Sep 22 23:22:26.739 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B5.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B5.replace"
49503 Sep 22 23:22:26.739 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49504 Sep 22 23:22:26.741 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B5.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49505 Sep 22 23:22:26.741 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B5"
49506 Sep 22 23:22:26.741 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B5.db"
49507 Sep 22 23:22:26.741 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49508 Sep 22 23:22:26.741 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B5.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B5.completed"
49509 Sep 22 23:22:26.741 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49510 Sep 22 23:22:26.741 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49511 Sep 22 23:22:26.741 DEBG [0] It's time to notify for 642
49512 Sep 22 23:22:26.741 INFO Completion from [0] id:642 status:true
49513 Sep 22 23:22:26.741 INFO [643/752] Repair commands completed
49514 Sep 22 23:22:26.742 INFO Pop front: ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }, state: ClientData([New, New, New]) }
49515 Sep 22 23:22:26.742 INFO Sent repair work, now wait for resp
49516 Sep 22 23:22:26.742 INFO [0] received reconcile message
49517 Sep 22 23:22:26.742 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }, state: ClientData([InProgress, New, New]) }, : downstairs
49518 Sep 22 23:22:26.742 INFO [0] client ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }
49519 Sep 22 23:22:26.742 INFO [1] received reconcile message
49520 Sep 22 23:22:26.742 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49521 Sep 22 23:22:26.742 INFO [1] client ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }
49522 Sep 22 23:22:26.742 INFO [2] received reconcile message
49523 Sep 22 23:22:26.742 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49524 Sep 22 23:22:26.742 INFO [2] client ExtentReopen { repair_id: ReconciliationId(643), extent_id: 181 }
49525 Sep 22 23:22:26.742 DEBG 643 Reopen extent 181
49526 Sep 22 23:22:26.743 DEBG 643 Reopen extent 181
49527 Sep 22 23:22:26.743 DEBG 643 Reopen extent 181
49528 Sep 22 23:22:26.744 DEBG [2] It's time to notify for 643
49529 Sep 22 23:22:26.744 INFO Completion from [2] id:643 status:true
49530 Sep 22 23:22:26.744 INFO [644/752] Repair commands completed
49531 Sep 22 23:22:26.744 INFO Pop front: ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49532 Sep 22 23:22:26.744 INFO Sent repair work, now wait for resp
49533 Sep 22 23:22:26.744 INFO [0] received reconcile message
49534 Sep 22 23:22:26.744 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49535 Sep 22 23:22:26.744 INFO [0] client ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49536 Sep 22 23:22:26.744 INFO [1] received reconcile message
49537 Sep 22 23:22:26.744 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49538 Sep 22 23:22:26.744 INFO [1] client ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49539 Sep 22 23:22:26.744 INFO [2] received reconcile message
49540 Sep 22 23:22:26.744 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49541 Sep 22 23:22:26.744 INFO [2] client ExtentFlush { repair_id: ReconciliationId(644), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49542 Sep 22 23:22:26.744 DEBG 644 Flush extent 82 with f:2 g:2
49543 Sep 22 23:22:26.744 DEBG Flush just extent 82 with f:2 and g:2
49544 Sep 22 23:22:26.744 DEBG [1] It's time to notify for 644
49545 Sep 22 23:22:26.745 INFO Completion from [1] id:644 status:true
49546 Sep 22 23:22:26.745 INFO [645/752] Repair commands completed
49547 Sep 22 23:22:26.745 INFO Pop front: ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }, state: ClientData([New, New, New]) }
49548 Sep 22 23:22:26.745 INFO Sent repair work, now wait for resp
49549 Sep 22 23:22:26.745 INFO [0] received reconcile message
49550 Sep 22 23:22:26.745 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }, state: ClientData([InProgress, New, New]) }, : downstairs
49551 Sep 22 23:22:26.745 INFO [0] client ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }
49552 Sep 22 23:22:26.745 INFO [1] received reconcile message
49553 Sep 22 23:22:26.745 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49554 Sep 22 23:22:26.745 INFO [1] client ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }
49555 Sep 22 23:22:26.745 INFO [2] received reconcile message
49556 Sep 22 23:22:26.745 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49557 Sep 22 23:22:26.745 INFO [2] client ExtentClose { repair_id: ReconciliationId(645), extent_id: 82 }
49558 Sep 22 23:22:26.745 DEBG 645 Close extent 82
49559 Sep 22 23:22:26.745 DEBG 645 Close extent 82
49560 Sep 22 23:22:26.746 DEBG 645 Close extent 82
49561 Sep 22 23:22:26.746 DEBG [2] It's time to notify for 645
49562 Sep 22 23:22:26.746 INFO Completion from [2] id:645 status:true
49563 Sep 22 23:22:26.746 INFO [646/752] Repair commands completed
49564 Sep 22 23:22:26.746 INFO Pop front: ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49565 Sep 22 23:22:26.746 INFO Sent repair work, now wait for resp
49566 Sep 22 23:22:26.746 INFO [0] received reconcile message
49567 Sep 22 23:22:26.746 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49568 Sep 22 23:22:26.746 INFO [0] client ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49569 Sep 22 23:22:26.746 INFO [0] Sending repair request ReconciliationId(646)
49570 Sep 22 23:22:26.746 INFO [1] received reconcile message
49571 Sep 22 23:22:26.746 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49572 Sep 22 23:22:26.746 INFO [1] client ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49573 Sep 22 23:22:26.746 INFO [1] No action required ReconciliationId(646)
49574 Sep 22 23:22:26.746 INFO [2] received reconcile message
49575 Sep 22 23:22:26.746 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49576 Sep 22 23:22:26.746 INFO [2] client ExtentRepair { repair_id: ReconciliationId(646), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49577 Sep 22 23:22:26.746 INFO [2] No action required ReconciliationId(646)
49578 Sep 22 23:22:26.746 DEBG 646 Repair extent 82 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49579 Sep 22 23:22:26.747 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/052.copy"
49580 Sep 22 23:22:26.812 INFO accepted connection, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49581 Sep 22 23:22:26.812 TRCE incoming request, uri: /extent/82/files, method: GET, req_id: 803b9bb7-8110-4e65-a5b7-73d5d0c75513, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49582 Sep 22 23:22:26.812 INFO request completed, latency_us: 206, response_code: 200, uri: /extent/82/files, method: GET, req_id: 803b9bb7-8110-4e65-a5b7-73d5d0c75513, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49583 Sep 22 23:22:26.813 INFO eid:82 Found repair files: ["052", "052.db"]
49584 Sep 22 23:22:26.813 TRCE incoming request, uri: /newextent/82/data, method: GET, req_id: e9b4c0c9-6d3e-4465-a753-616b28d05957, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49585 Sep 22 23:22:26.813 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/82/data, method: GET, req_id: e9b4c0c9-6d3e-4465-a753-616b28d05957, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49586 Sep 22 23:22:26.818 TRCE incoming request, uri: /newextent/82/db, method: GET, req_id: aecea6b8-7f47-4609-8493-e862a8d94cf5, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49587 Sep 22 23:22:26.818 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/82/db, method: GET, req_id: aecea6b8-7f47-4609-8493-e862a8d94cf5, remote_addr: 127.0.0.1:38493, local_addr: 127.0.0.1:52864, task: repair
49588 Sep 22 23:22:26.819 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/052.copy" to "/tmp/downstairs-zrMnlo6G/00/000/052.replace"
49589 Sep 22 23:22:26.819 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49590 Sep 22 23:22:26.820 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/052.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49591 Sep 22 23:22:26.820 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/052"
49592 Sep 22 23:22:26.820 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/052.db"
49593 Sep 22 23:22:26.820 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49594 Sep 22 23:22:26.820 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/052.replace" to "/tmp/downstairs-zrMnlo6G/00/000/052.completed"
49595 Sep 22 23:22:26.820 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49596 Sep 22 23:22:26.821 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49597 Sep 22 23:22:26.821 DEBG [0] It's time to notify for 646
49598 Sep 22 23:22:26.821 INFO Completion from [0] id:646 status:true
49599 Sep 22 23:22:26.821 INFO [647/752] Repair commands completed
49600 Sep 22 23:22:26.821 INFO Pop front: ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }, state: ClientData([New, New, New]) }
49601 Sep 22 23:22:26.821 INFO Sent repair work, now wait for resp
49602 Sep 22 23:22:26.821 INFO [0] received reconcile message
49603 Sep 22 23:22:26.821 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }, state: ClientData([InProgress, New, New]) }, : downstairs
49604 Sep 22 23:22:26.821 INFO [0] client ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }
49605 Sep 22 23:22:26.821 INFO [1] received reconcile message
49606 Sep 22 23:22:26.821 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49607 Sep 22 23:22:26.821 INFO [1] client ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }
49608 Sep 22 23:22:26.821 INFO [2] received reconcile message
49609 Sep 22 23:22:26.821 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49610 Sep 22 23:22:26.821 INFO [2] client ExtentReopen { repair_id: ReconciliationId(647), extent_id: 82 }
49611 Sep 22 23:22:26.821 DEBG 647 Reopen extent 82
49612 Sep 22 23:22:26.822 DEBG 647 Reopen extent 82
49613 Sep 22 23:22:26.822 DEBG 647 Reopen extent 82
49614 Sep 22 23:22:26.823 DEBG [2] It's time to notify for 647
49615 Sep 22 23:22:26.823 INFO Completion from [2] id:647 status:true
49616 Sep 22 23:22:26.823 INFO [648/752] Repair commands completed
49617 Sep 22 23:22:26.823 INFO Pop front: ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49618 Sep 22 23:22:26.823 INFO Sent repair work, now wait for resp
49619 Sep 22 23:22:26.823 INFO [0] received reconcile message
49620 Sep 22 23:22:26.823 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49621 Sep 22 23:22:26.823 INFO [0] client ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49622 Sep 22 23:22:26.823 INFO [1] received reconcile message
49623 Sep 22 23:22:26.823 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49624 Sep 22 23:22:26.823 INFO [1] client ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49625 Sep 22 23:22:26.823 INFO [2] received reconcile message
49626 Sep 22 23:22:26.823 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49627 Sep 22 23:22:26.823 INFO [2] client ExtentFlush { repair_id: ReconciliationId(648), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49628 Sep 22 23:22:26.823 DEBG 648 Flush extent 174 with f:2 g:2
49629 Sep 22 23:22:26.823 DEBG Flush just extent 174 with f:2 and g:2
49630 Sep 22 23:22:26.824 DEBG [1] It's time to notify for 648
49631 Sep 22 23:22:26.824 INFO Completion from [1] id:648 status:true
49632 Sep 22 23:22:26.824 INFO [649/752] Repair commands completed
49633 Sep 22 23:22:26.824 INFO Pop front: ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }, state: ClientData([New, New, New]) }
49634 Sep 22 23:22:26.824 INFO Sent repair work, now wait for resp
49635 Sep 22 23:22:26.824 INFO [0] received reconcile message
49636 Sep 22 23:22:26.824 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }, state: ClientData([InProgress, New, New]) }, : downstairs
49637 Sep 22 23:22:26.824 INFO [0] client ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }
49638 Sep 22 23:22:26.824 INFO [1] received reconcile message
49639 Sep 22 23:22:26.824 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49640 Sep 22 23:22:26.824 INFO [1] client ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }
49641 Sep 22 23:22:26.824 INFO [2] received reconcile message
49642 Sep 22 23:22:26.824 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49643 Sep 22 23:22:26.824 INFO [2] client ExtentClose { repair_id: ReconciliationId(649), extent_id: 174 }
49644 Sep 22 23:22:26.824 DEBG 649 Close extent 174
49645 Sep 22 23:22:26.824 DEBG 649 Close extent 174
49646 Sep 22 23:22:26.825 DEBG 649 Close extent 174
49647 Sep 22 23:22:26.825 DEBG [2] It's time to notify for 649
49648 Sep 22 23:22:26.825 INFO Completion from [2] id:649 status:true
49649 Sep 22 23:22:26.825 INFO [650/752] Repair commands completed
49650 Sep 22 23:22:26.825 INFO Pop front: ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49651 Sep 22 23:22:26.825 INFO Sent repair work, now wait for resp
49652 Sep 22 23:22:26.825 INFO [0] received reconcile message
49653 Sep 22 23:22:26.825 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49654 Sep 22 23:22:26.825 INFO [0] client ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49655 Sep 22 23:22:26.825 INFO [0] Sending repair request ReconciliationId(650)
49656 Sep 22 23:22:26.825 INFO [1] received reconcile message
49657 Sep 22 23:22:26.825 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49658 Sep 22 23:22:26.825 INFO [1] client ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49659 Sep 22 23:22:26.825 INFO [1] No action required ReconciliationId(650)
49660 Sep 22 23:22:26.825 INFO [2] received reconcile message
49661 Sep 22 23:22:26.825 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49662 Sep 22 23:22:26.825 INFO [2] client ExtentRepair { repair_id: ReconciliationId(650), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49663 Sep 22 23:22:26.825 INFO [2] No action required ReconciliationId(650)
49664 Sep 22 23:22:26.826 DEBG 650 Repair extent 174 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49665 Sep 22 23:22:26.826 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0AE.copy"
49666 Sep 22 23:22:26.891 INFO accepted connection, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49667 Sep 22 23:22:26.892 TRCE incoming request, uri: /extent/174/files, method: GET, req_id: 2e976354-b1d1-49e5-bba6-671cd2216493, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49668 Sep 22 23:22:26.892 INFO request completed, latency_us: 208, response_code: 200, uri: /extent/174/files, method: GET, req_id: 2e976354-b1d1-49e5-bba6-671cd2216493, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49669 Sep 22 23:22:26.892 INFO eid:174 Found repair files: ["0AE", "0AE.db"]
49670 Sep 22 23:22:26.892 TRCE incoming request, uri: /newextent/174/data, method: GET, req_id: 5047da10-44ce-4508-944c-26526de03bd1, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49671 Sep 22 23:22:26.893 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/174/data, method: GET, req_id: 5047da10-44ce-4508-944c-26526de03bd1, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49672 Sep 22 23:22:26.897 TRCE incoming request, uri: /newextent/174/db, method: GET, req_id: 4e5a52d2-5002-4df2-a936-4a2ae13a46cd, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49673 Sep 22 23:22:26.898 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/174/db, method: GET, req_id: 4e5a52d2-5002-4df2-a936-4a2ae13a46cd, remote_addr: 127.0.0.1:32878, local_addr: 127.0.0.1:52864, task: repair
49674 Sep 22 23:22:26.899 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0AE.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0AE.replace"
49675 Sep 22 23:22:26.899 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49676 Sep 22 23:22:26.900 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0AE.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49677 Sep 22 23:22:26.900 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AE"
49678 Sep 22 23:22:26.900 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AE.db"
49679 Sep 22 23:22:26.900 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49680 Sep 22 23:22:26.900 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0AE.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0AE.completed"
49681 Sep 22 23:22:26.900 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49682 Sep 22 23:22:26.900 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49683 Sep 22 23:22:26.900 DEBG [0] It's time to notify for 650
49684 Sep 22 23:22:26.900 INFO Completion from [0] id:650 status:true
49685 Sep 22 23:22:26.900 INFO [651/752] Repair commands completed
49686 Sep 22 23:22:26.901 INFO Pop front: ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }, state: ClientData([New, New, New]) }
49687 Sep 22 23:22:26.901 INFO Sent repair work, now wait for resp
49688 Sep 22 23:22:26.901 INFO [0] received reconcile message
49689 Sep 22 23:22:26.901 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }, state: ClientData([InProgress, New, New]) }, : downstairs
49690 Sep 22 23:22:26.901 INFO [0] client ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }
49691 Sep 22 23:22:26.901 INFO [1] received reconcile message
49692 Sep 22 23:22:26.901 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49693 Sep 22 23:22:26.901 INFO [1] client ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }
49694 Sep 22 23:22:26.901 INFO [2] received reconcile message
49695 Sep 22 23:22:26.901 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49696 Sep 22 23:22:26.901 INFO [2] client ExtentReopen { repair_id: ReconciliationId(651), extent_id: 174 }
49697 Sep 22 23:22:26.901 DEBG 651 Reopen extent 174
49698 Sep 22 23:22:26.902 DEBG 651 Reopen extent 174
49699 Sep 22 23:22:26.902 DEBG 651 Reopen extent 174
49700 Sep 22 23:22:26.903 DEBG [2] It's time to notify for 651
49701 Sep 22 23:22:26.903 INFO Completion from [2] id:651 status:true
49702 Sep 22 23:22:26.903 INFO [652/752] Repair commands completed
49703 Sep 22 23:22:26.903 INFO Pop front: ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49704 Sep 22 23:22:26.903 INFO Sent repair work, now wait for resp
49705 Sep 22 23:22:26.903 INFO [0] received reconcile message
49706 Sep 22 23:22:26.903 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49707 Sep 22 23:22:26.903 INFO [0] client ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49708 Sep 22 23:22:26.903 INFO [1] received reconcile message
49709 Sep 22 23:22:26.903 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49710 Sep 22 23:22:26.903 INFO [1] client ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49711 Sep 22 23:22:26.903 INFO [2] received reconcile message
49712 Sep 22 23:22:26.903 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49713 Sep 22 23:22:26.903 INFO [2] client ExtentFlush { repair_id: ReconciliationId(652), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49714 Sep 22 23:22:26.903 DEBG 652 Flush extent 17 with f:2 g:2
49715 Sep 22 23:22:26.903 DEBG Flush just extent 17 with f:2 and g:2
49716 Sep 22 23:22:26.903 DEBG [1] It's time to notify for 652
49717 Sep 22 23:22:26.903 INFO Completion from [1] id:652 status:true
49718 Sep 22 23:22:26.903 INFO [653/752] Repair commands completed
49719 Sep 22 23:22:26.903 INFO Pop front: ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }, state: ClientData([New, New, New]) }
49720 Sep 22 23:22:26.903 INFO Sent repair work, now wait for resp
49721 Sep 22 23:22:26.903 INFO [0] received reconcile message
49722 Sep 22 23:22:26.903 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }, state: ClientData([InProgress, New, New]) }, : downstairs
49723 Sep 22 23:22:26.903 INFO [0] client ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }
49724 Sep 22 23:22:26.903 INFO [1] received reconcile message
49725 Sep 22 23:22:26.903 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49726 Sep 22 23:22:26.903 INFO [1] client ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }
49727 Sep 22 23:22:26.903 INFO [2] received reconcile message
49728 Sep 22 23:22:26.903 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49729 Sep 22 23:22:26.904 INFO [2] client ExtentClose { repair_id: ReconciliationId(653), extent_id: 17 }
49730 Sep 22 23:22:26.904 DEBG 653 Close extent 17
49731 Sep 22 23:22:26.904 DEBG 653 Close extent 17
49732 Sep 22 23:22:26.904 DEBG 653 Close extent 17
49733 Sep 22 23:22:26.905 DEBG [2] It's time to notify for 653
49734 Sep 22 23:22:26.905 INFO Completion from [2] id:653 status:true
49735 Sep 22 23:22:26.905 INFO [654/752] Repair commands completed
49736 Sep 22 23:22:26.905 INFO Pop front: ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49737 Sep 22 23:22:26.905 INFO Sent repair work, now wait for resp
49738 Sep 22 23:22:26.905 INFO [0] received reconcile message
49739 Sep 22 23:22:26.905 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49740 Sep 22 23:22:26.905 INFO [0] client ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49741 Sep 22 23:22:26.905 INFO [0] Sending repair request ReconciliationId(654)
49742 Sep 22 23:22:26.905 INFO [1] received reconcile message
49743 Sep 22 23:22:26.905 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49744 Sep 22 23:22:26.905 INFO [1] client ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49745 Sep 22 23:22:26.905 INFO [1] No action required ReconciliationId(654)
49746 Sep 22 23:22:26.905 INFO [2] received reconcile message
49747 Sep 22 23:22:26.905 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49748 Sep 22 23:22:26.905 INFO [2] client ExtentRepair { repair_id: ReconciliationId(654), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49749 Sep 22 23:22:26.905 INFO [2] No action required ReconciliationId(654)
49750 Sep 22 23:22:26.905 DEBG 654 Repair extent 17 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49751 Sep 22 23:22:26.905 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/011.copy"
49752 Sep 22 23:22:26.968 INFO accepted connection, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49753 Sep 22 23:22:26.969 TRCE incoming request, uri: /extent/17/files, method: GET, req_id: 3dd08200-033d-4d1a-8a33-07b962a4d5f7, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49754 Sep 22 23:22:26.969 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/17/files, method: GET, req_id: 3dd08200-033d-4d1a-8a33-07b962a4d5f7, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49755 Sep 22 23:22:26.969 INFO eid:17 Found repair files: ["011", "011.db"]
49756 Sep 22 23:22:26.969 TRCE incoming request, uri: /newextent/17/data, method: GET, req_id: ef664cc4-a80f-44e8-bd62-25d0276be673, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49757 Sep 22 23:22:26.970 INFO request completed, latency_us: 311, response_code: 200, uri: /newextent/17/data, method: GET, req_id: ef664cc4-a80f-44e8-bd62-25d0276be673, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49758 Sep 22 23:22:26.975 TRCE incoming request, uri: /newextent/17/db, method: GET, req_id: b3dd0017-0792-4a45-8259-0fb3a6bac94a, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49759 Sep 22 23:22:26.975 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/17/db, method: GET, req_id: b3dd0017-0792-4a45-8259-0fb3a6bac94a, remote_addr: 127.0.0.1:55601, local_addr: 127.0.0.1:52864, task: repair
49760 Sep 22 23:22:26.976 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/011.copy" to "/tmp/downstairs-zrMnlo6G/00/000/011.replace"
49761 Sep 22 23:22:26.976 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49762 Sep 22 23:22:26.977 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/011.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49763 Sep 22 23:22:26.977 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/011"
49764 Sep 22 23:22:26.977 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/011.db"
49765 Sep 22 23:22:26.977 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49766 Sep 22 23:22:26.977 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/011.replace" to "/tmp/downstairs-zrMnlo6G/00/000/011.completed"
49767 Sep 22 23:22:26.977 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49768 Sep 22 23:22:26.977 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49769 Sep 22 23:22:26.978 DEBG [0] It's time to notify for 654
49770 Sep 22 23:22:26.978 INFO Completion from [0] id:654 status:true
49771 Sep 22 23:22:26.978 INFO [655/752] Repair commands completed
49772 Sep 22 23:22:26.978 INFO Pop front: ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }, state: ClientData([New, New, New]) }
49773 Sep 22 23:22:26.978 INFO Sent repair work, now wait for resp
49774 Sep 22 23:22:26.978 INFO [0] received reconcile message
49775 Sep 22 23:22:26.978 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }, state: ClientData([InProgress, New, New]) }, : downstairs
49776 Sep 22 23:22:26.978 INFO [0] client ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }
49777 Sep 22 23:22:26.978 INFO [1] received reconcile message
49778 Sep 22 23:22:26.978 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49779 Sep 22 23:22:26.978 INFO [1] client ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }
49780 Sep 22 23:22:26.978 INFO [2] received reconcile message
49781 Sep 22 23:22:26.978 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49782 Sep 22 23:22:26.978 INFO [2] client ExtentReopen { repair_id: ReconciliationId(655), extent_id: 17 }
49783 Sep 22 23:22:26.978 DEBG 655 Reopen extent 17
49784 Sep 22 23:22:26.979 DEBG 655 Reopen extent 17
49785 Sep 22 23:22:26.979 DEBG 655 Reopen extent 17
49786 Sep 22 23:22:26.980 DEBG [2] It's time to notify for 655
49787 Sep 22 23:22:26.980 INFO Completion from [2] id:655 status:true
49788 Sep 22 23:22:26.980 INFO [656/752] Repair commands completed
49789 Sep 22 23:22:26.980 INFO Pop front: ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49790 Sep 22 23:22:26.980 INFO Sent repair work, now wait for resp
49791 Sep 22 23:22:26.980 INFO [0] received reconcile message
49792 Sep 22 23:22:26.980 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49793 Sep 22 23:22:26.980 INFO [0] client ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49794 Sep 22 23:22:26.980 INFO [1] received reconcile message
49795 Sep 22 23:22:26.980 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49796 Sep 22 23:22:26.980 INFO [1] client ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49797 Sep 22 23:22:26.980 INFO [2] received reconcile message
49798 Sep 22 23:22:26.980 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49799 Sep 22 23:22:26.980 INFO [2] client ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49800 Sep 22 23:22:26.980 DEBG 656 Flush extent 116 with f:2 g:2
49801 Sep 22 23:22:26.980 DEBG Flush just extent 116 with f:2 and g:2
49802 Sep 22 23:22:26.980 DEBG [1] It's time to notify for 656
49803 Sep 22 23:22:26.980 INFO Completion from [1] id:656 status:true
49804 Sep 22 23:22:26.980 INFO [657/752] Repair commands completed
49805 Sep 22 23:22:26.980 INFO Pop front: ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([New, New, New]) }
49806 Sep 22 23:22:26.980 INFO Sent repair work, now wait for resp
49807 Sep 22 23:22:26.980 INFO [0] received reconcile message
49808 Sep 22 23:22:26.981 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([InProgress, New, New]) }, : downstairs
49809 Sep 22 23:22:26.981 INFO [0] client ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }
49810 Sep 22 23:22:26.981 INFO [1] received reconcile message
49811 Sep 22 23:22:26.981 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49812 Sep 22 23:22:26.981 INFO [1] client ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }
49813 Sep 22 23:22:26.981 INFO [2] received reconcile message
49814 Sep 22 23:22:26.981 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49815 Sep 22 23:22:26.981 INFO [2] client ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }
49816 Sep 22 23:22:26.981 DEBG 657 Close extent 116
49817 Sep 22 23:22:26.981 DEBG 657 Close extent 116
49818 Sep 22 23:22:26.981 DEBG 657 Close extent 116
49819 Sep 22 23:22:26.982 DEBG [2] It's time to notify for 657
49820 Sep 22 23:22:26.982 INFO Completion from [2] id:657 status:true
49821 Sep 22 23:22:26.982 INFO [658/752] Repair commands completed
49822 Sep 22 23:22:26.982 INFO Pop front: ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49823 Sep 22 23:22:26.982 INFO Sent repair work, now wait for resp
49824 Sep 22 23:22:26.982 INFO [0] received reconcile message
49825 Sep 22 23:22:26.982 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49826 Sep 22 23:22:26.982 INFO [0] client ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49827 Sep 22 23:22:26.982 INFO [0] Sending repair request ReconciliationId(658)
49828 Sep 22 23:22:26.982 INFO [1] received reconcile message
49829 Sep 22 23:22:26.982 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49830 Sep 22 23:22:26.982 INFO [1] client ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49831 Sep 22 23:22:26.982 INFO [1] No action required ReconciliationId(658)
49832 Sep 22 23:22:26.982 INFO [2] received reconcile message
49833 Sep 22 23:22:26.982 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49834 Sep 22 23:22:26.982 INFO [2] client ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49835 Sep 22 23:22:26.982 INFO [2] No action required ReconciliationId(658)
49836 Sep 22 23:22:26.982 DEBG 658 Repair extent 116 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49837 Sep 22 23:22:26.982 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/074.copy"
49838 Sep 22 23:22:27.048 INFO accepted connection, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49839 Sep 22 23:22:27.048 TRCE incoming request, uri: /extent/116/files, method: GET, req_id: 9efeef61-d913-43a4-8513-2602c72583e8, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49840 Sep 22 23:22:27.048 INFO request completed, latency_us: 258, response_code: 200, uri: /extent/116/files, method: GET, req_id: 9efeef61-d913-43a4-8513-2602c72583e8, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49841 Sep 22 23:22:27.049 INFO eid:116 Found repair files: ["074", "074.db"]
49842 Sep 22 23:22:27.049 TRCE incoming request, uri: /newextent/116/data, method: GET, req_id: 98559df6-11c1-4596-8101-3775f8c6373d, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49843 Sep 22 23:22:27.049 INFO request completed, latency_us: 361, response_code: 200, uri: /newextent/116/data, method: GET, req_id: 98559df6-11c1-4596-8101-3775f8c6373d, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49844 Sep 22 23:22:27.054 TRCE incoming request, uri: /newextent/116/db, method: GET, req_id: abfa9dac-0d0d-4f06-8a43-a11bb6ca438f, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49845 Sep 22 23:22:27.054 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/116/db, method: GET, req_id: abfa9dac-0d0d-4f06-8a43-a11bb6ca438f, remote_addr: 127.0.0.1:47179, local_addr: 127.0.0.1:52864, task: repair
49846 Sep 22 23:22:27.055 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/074.copy" to "/tmp/downstairs-zrMnlo6G/00/000/074.replace"
49847 Sep 22 23:22:27.056 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49848 Sep 22 23:22:27.056 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/074.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49849 Sep 22 23:22:27.057 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/074"
49850 Sep 22 23:22:27.057 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/074.db"
49851 Sep 22 23:22:27.057 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49852 Sep 22 23:22:27.057 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/074.replace" to "/tmp/downstairs-zrMnlo6G/00/000/074.completed"
49853 Sep 22 23:22:27.057 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49854 Sep 22 23:22:27.057 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49855 Sep 22 23:22:27.057 DEBG [0] It's time to notify for 658
49856 Sep 22 23:22:27.057 INFO Completion from [0] id:658 status:true
49857 Sep 22 23:22:27.057 INFO [659/752] Repair commands completed
49858 Sep 22 23:22:27.057 INFO Pop front: ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([New, New, New]) }
49859 Sep 22 23:22:27.057 INFO Sent repair work, now wait for resp
49860 Sep 22 23:22:27.057 INFO [0] received reconcile message
49861 Sep 22 23:22:27.057 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([InProgress, New, New]) }, : downstairs
49862 Sep 22 23:22:27.057 INFO [0] client ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }
49863 Sep 22 23:22:27.058 INFO [1] received reconcile message
49864 Sep 22 23:22:27.058 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49865 Sep 22 23:22:27.058 INFO [1] client ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }
49866 Sep 22 23:22:27.058 INFO [2] received reconcile message
49867 Sep 22 23:22:27.058 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49868 Sep 22 23:22:27.058 INFO [2] client ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }
49869 Sep 22 23:22:27.058 DEBG 659 Reopen extent 116
49870 Sep 22 23:22:27.058 DEBG 659 Reopen extent 116
49871 Sep 22 23:22:27.059 DEBG 659 Reopen extent 116
49872 Sep 22 23:22:27.060 DEBG [2] It's time to notify for 659
49873 Sep 22 23:22:27.060 INFO Completion from [2] id:659 status:true
49874 Sep 22 23:22:27.060 INFO [660/752] Repair commands completed
49875 Sep 22 23:22:27.060 INFO Pop front: ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49876 Sep 22 23:22:27.060 INFO Sent repair work, now wait for resp
49877 Sep 22 23:22:27.060 INFO [0] received reconcile message
49878 Sep 22 23:22:27.060 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49879 Sep 22 23:22:27.060 INFO [0] client ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49880 Sep 22 23:22:27.060 INFO [1] received reconcile message
49881 Sep 22 23:22:27.060 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49882 Sep 22 23:22:27.060 INFO [1] client ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49883 Sep 22 23:22:27.060 INFO [2] received reconcile message
49884 Sep 22 23:22:27.060 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49885 Sep 22 23:22:27.060 INFO [2] client ExtentFlush { repair_id: ReconciliationId(660), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49886 Sep 22 23:22:27.060 DEBG 660 Flush extent 142 with f:2 g:2
49887 Sep 22 23:22:27.060 DEBG Flush just extent 142 with f:2 and g:2
49888 Sep 22 23:22:27.060 DEBG [1] It's time to notify for 660
49889 Sep 22 23:22:27.060 INFO Completion from [1] id:660 status:true
49890 Sep 22 23:22:27.060 INFO [661/752] Repair commands completed
49891 Sep 22 23:22:27.060 INFO Pop front: ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }, state: ClientData([New, New, New]) }
49892 Sep 22 23:22:27.060 INFO Sent repair work, now wait for resp
49893 Sep 22 23:22:27.060 INFO [0] received reconcile message
49894 Sep 22 23:22:27.060 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }, state: ClientData([InProgress, New, New]) }, : downstairs
49895 Sep 22 23:22:27.060 INFO [0] client ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }
49896 Sep 22 23:22:27.060 INFO [1] received reconcile message
49897 Sep 22 23:22:27.061 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49898 Sep 22 23:22:27.061 INFO [1] client ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }
49899 Sep 22 23:22:27.061 INFO [2] received reconcile message
49900 Sep 22 23:22:27.061 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49901 Sep 22 23:22:27.061 INFO [2] client ExtentClose { repair_id: ReconciliationId(661), extent_id: 142 }
49902 Sep 22 23:22:27.061 DEBG 661 Close extent 142
49903 Sep 22 23:22:27.061 DEBG 661 Close extent 142
49904 Sep 22 23:22:27.061 DEBG 661 Close extent 142
49905 Sep 22 23:22:27.062 DEBG [2] It's time to notify for 661
49906 Sep 22 23:22:27.062 INFO Completion from [2] id:661 status:true
49907 Sep 22 23:22:27.062 INFO [662/752] Repair commands completed
49908 Sep 22 23:22:27.062 INFO Pop front: ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49909 Sep 22 23:22:27.062 INFO Sent repair work, now wait for resp
49910 Sep 22 23:22:27.062 INFO [0] received reconcile message
49911 Sep 22 23:22:27.062 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49912 Sep 22 23:22:27.062 INFO [0] client ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49913 Sep 22 23:22:27.062 INFO [0] Sending repair request ReconciliationId(662)
49914 Sep 22 23:22:27.062 INFO [1] received reconcile message
49915 Sep 22 23:22:27.062 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49916 Sep 22 23:22:27.062 INFO [1] client ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49917 Sep 22 23:22:27.062 INFO [1] No action required ReconciliationId(662)
49918 Sep 22 23:22:27.062 INFO [2] received reconcile message
49919 Sep 22 23:22:27.062 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49920 Sep 22 23:22:27.062 INFO [2] client ExtentRepair { repair_id: ReconciliationId(662), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49921 Sep 22 23:22:27.062 INFO [2] No action required ReconciliationId(662)
49922 Sep 22 23:22:27.062 DEBG 662 Repair extent 142 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
49923 Sep 22 23:22:27.062 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/08E.copy"
49924 Sep 22 23:22:27.124 INFO accepted connection, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49925 Sep 22 23:22:27.124 TRCE incoming request, uri: /extent/142/files, method: GET, req_id: 8e1954a0-5ac9-4b2a-b556-473cf008d5c2, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49926 Sep 22 23:22:27.125 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/142/files, method: GET, req_id: 8e1954a0-5ac9-4b2a-b556-473cf008d5c2, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49927 Sep 22 23:22:27.125 INFO eid:142 Found repair files: ["08E", "08E.db"]
49928 Sep 22 23:22:27.125 TRCE incoming request, uri: /newextent/142/data, method: GET, req_id: 1f54fc79-308f-4a13-8be7-64c52c202998, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49929 Sep 22 23:22:27.125 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/142/data, method: GET, req_id: 1f54fc79-308f-4a13-8be7-64c52c202998, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49930 Sep 22 23:22:27.130 TRCE incoming request, uri: /newextent/142/db, method: GET, req_id: 1ad0bcd3-7925-47df-a263-97ef83220568, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49931 Sep 22 23:22:27.130 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/142/db, method: GET, req_id: 1ad0bcd3-7925-47df-a263-97ef83220568, remote_addr: 127.0.0.1:64380, local_addr: 127.0.0.1:52864, task: repair
49932 Sep 22 23:22:27.131 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/08E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/08E.replace"
49933 Sep 22 23:22:27.131 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49934 Sep 22 23:22:27.132 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/08E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
49935 Sep 22 23:22:27.133 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08E"
49936 Sep 22 23:22:27.133 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08E.db"
49937 Sep 22 23:22:27.133 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49938 Sep 22 23:22:27.133 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/08E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/08E.completed"
49939 Sep 22 23:22:27.133 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49940 Sep 22 23:22:27.133 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
49941 Sep 22 23:22:27.133 DEBG [0] It's time to notify for 662
49942 Sep 22 23:22:27.133 INFO Completion from [0] id:662 status:true
49943 Sep 22 23:22:27.133 INFO [663/752] Repair commands completed
49944 Sep 22 23:22:27.133 INFO Pop front: ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }, state: ClientData([New, New, New]) }
49945 Sep 22 23:22:27.133 INFO Sent repair work, now wait for resp
49946 Sep 22 23:22:27.133 INFO [0] received reconcile message
49947 Sep 22 23:22:27.133 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }, state: ClientData([InProgress, New, New]) }, : downstairs
49948 Sep 22 23:22:27.133 INFO [0] client ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }
49949 Sep 22 23:22:27.133 INFO [1] received reconcile message
49950 Sep 22 23:22:27.133 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49951 Sep 22 23:22:27.133 INFO [1] client ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }
49952 Sep 22 23:22:27.133 INFO [2] received reconcile message
49953 Sep 22 23:22:27.133 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49954 Sep 22 23:22:27.133 INFO [2] client ExtentReopen { repair_id: ReconciliationId(663), extent_id: 142 }
49955 Sep 22 23:22:27.134 DEBG 663 Reopen extent 142
49956 Sep 22 23:22:27.134 DEBG 663 Reopen extent 142
49957 Sep 22 23:22:27.135 DEBG 663 Reopen extent 142
49958 Sep 22 23:22:27.135 DEBG [2] It's time to notify for 663
49959 Sep 22 23:22:27.135 INFO Completion from [2] id:663 status:true
49960 Sep 22 23:22:27.135 INFO [664/752] Repair commands completed
49961 Sep 22 23:22:27.135 INFO Pop front: ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49962 Sep 22 23:22:27.135 INFO Sent repair work, now wait for resp
49963 Sep 22 23:22:27.135 INFO [0] received reconcile message
49964 Sep 22 23:22:27.135 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49965 Sep 22 23:22:27.135 INFO [0] client ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49966 Sep 22 23:22:27.135 INFO [1] received reconcile message
49967 Sep 22 23:22:27.135 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49968 Sep 22 23:22:27.135 INFO [1] client ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49969 Sep 22 23:22:27.136 INFO [2] received reconcile message
49970 Sep 22 23:22:27.136 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49971 Sep 22 23:22:27.136 INFO [2] client ExtentFlush { repair_id: ReconciliationId(664), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49972 Sep 22 23:22:27.136 DEBG 664 Flush extent 88 with f:2 g:2
49973 Sep 22 23:22:27.136 DEBG Flush just extent 88 with f:2 and g:2
49974 Sep 22 23:22:27.136 DEBG [1] It's time to notify for 664
49975 Sep 22 23:22:27.136 INFO Completion from [1] id:664 status:true
49976 Sep 22 23:22:27.136 INFO [665/752] Repair commands completed
49977 Sep 22 23:22:27.136 INFO Pop front: ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }, state: ClientData([New, New, New]) }
49978 Sep 22 23:22:27.136 INFO Sent repair work, now wait for resp
49979 Sep 22 23:22:27.136 INFO [0] received reconcile message
49980 Sep 22 23:22:27.136 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }, state: ClientData([InProgress, New, New]) }, : downstairs
49981 Sep 22 23:22:27.136 INFO [0] client ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }
49982 Sep 22 23:22:27.136 INFO [1] received reconcile message
49983 Sep 22 23:22:27.136 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49984 Sep 22 23:22:27.136 INFO [1] client ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }
49985 Sep 22 23:22:27.136 INFO [2] received reconcile message
49986 Sep 22 23:22:27.136 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49987 Sep 22 23:22:27.136 INFO [2] client ExtentClose { repair_id: ReconciliationId(665), extent_id: 88 }
49988 Sep 22 23:22:27.136 DEBG 665 Close extent 88
49989 Sep 22 23:22:27.137 DEBG 665 Close extent 88
49990 Sep 22 23:22:27.137 DEBG 665 Close extent 88
49991 Sep 22 23:22:27.137 DEBG [2] It's time to notify for 665
49992 Sep 22 23:22:27.137 INFO Completion from [2] id:665 status:true
49993 Sep 22 23:22:27.137 INFO [666/752] Repair commands completed
49994 Sep 22 23:22:27.137 INFO Pop front: ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49995 Sep 22 23:22:27.137 INFO Sent repair work, now wait for resp
49996 Sep 22 23:22:27.137 INFO [0] received reconcile message
49997 Sep 22 23:22:27.137 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49998 Sep 22 23:22:27.137 INFO [0] client ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
49999 Sep 22 23:22:27.137 INFO [0] Sending repair request ReconciliationId(666)
50000 Sep 22 23:22:27.138 INFO [1] received reconcile message
50001 Sep 22 23:22:27.138 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50002 Sep 22 23:22:27.138 INFO [1] client ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50003 Sep 22 23:22:27.138 INFO [1] No action required ReconciliationId(666)
50004 Sep 22 23:22:27.138 INFO [2] received reconcile message
50005 Sep 22 23:22:27.138 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50006 Sep 22 23:22:27.138 INFO [2] client ExtentRepair { repair_id: ReconciliationId(666), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50007 Sep 22 23:22:27.138 INFO [2] No action required ReconciliationId(666)
50008 Sep 22 23:22:27.138 DEBG 666 Repair extent 88 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50009 Sep 22 23:22:27.138 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/058.copy"
50010 Sep 22 23:22:27.203 INFO accepted connection, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50011 Sep 22 23:22:27.204 TRCE incoming request, uri: /extent/88/files, method: GET, req_id: c407c640-7177-486a-8c62-36c8a040d987, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50012 Sep 22 23:22:27.204 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/88/files, method: GET, req_id: c407c640-7177-486a-8c62-36c8a040d987, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50013 Sep 22 23:22:27.204 INFO eid:88 Found repair files: ["058", "058.db"]
50014 Sep 22 23:22:27.204 TRCE incoming request, uri: /newextent/88/data, method: GET, req_id: c9c98208-e646-4784-9b69-9a0951e9ef6d, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50015 Sep 22 23:22:27.205 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/88/data, method: GET, req_id: c9c98208-e646-4784-9b69-9a0951e9ef6d, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50016 Sep 22 23:22:27.209 TRCE incoming request, uri: /newextent/88/db, method: GET, req_id: 83019044-db17-471f-95a7-3a9aca6fa93f, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50017 Sep 22 23:22:27.210 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/88/db, method: GET, req_id: 83019044-db17-471f-95a7-3a9aca6fa93f, remote_addr: 127.0.0.1:58448, local_addr: 127.0.0.1:52864, task: repair
50018 Sep 22 23:22:27.211 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/058.copy" to "/tmp/downstairs-zrMnlo6G/00/000/058.replace"
50019 Sep 22 23:22:27.211 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50020 Sep 22 23:22:27.212 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/058.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50021 Sep 22 23:22:27.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/058"
50022 Sep 22 23:22:27.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/058.db"
50023 Sep 22 23:22:27.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50024 Sep 22 23:22:27.212 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/058.replace" to "/tmp/downstairs-zrMnlo6G/00/000/058.completed"
50025 Sep 22 23:22:27.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50026 Sep 22 23:22:27.212 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50027 Sep 22 23:22:27.212 DEBG [0] It's time to notify for 666
50028 Sep 22 23:22:27.213 INFO Completion from [0] id:666 status:true
50029 Sep 22 23:22:27.213 INFO [667/752] Repair commands completed
50030 Sep 22 23:22:27.213 INFO Pop front: ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }, state: ClientData([New, New, New]) }
50031 Sep 22 23:22:27.213 INFO Sent repair work, now wait for resp
50032 Sep 22 23:22:27.213 INFO [0] received reconcile message
50033 Sep 22 23:22:27.213 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }, state: ClientData([InProgress, New, New]) }, : downstairs
50034 Sep 22 23:22:27.213 INFO [0] client ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }
50035 Sep 22 23:22:27.213 INFO [1] received reconcile message
50036 Sep 22 23:22:27.213 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50037 Sep 22 23:22:27.213 INFO [1] client ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }
50038 Sep 22 23:22:27.213 INFO [2] received reconcile message
50039 Sep 22 23:22:27.213 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50040 Sep 22 23:22:27.213 INFO [2] client ExtentReopen { repair_id: ReconciliationId(667), extent_id: 88 }
50041 Sep 22 23:22:27.213 DEBG 667 Reopen extent 88
50042 Sep 22 23:22:27.214 DEBG 667 Reopen extent 88
50043 Sep 22 23:22:27.214 DEBG 667 Reopen extent 88
50044 Sep 22 23:22:27.215 DEBG [2] It's time to notify for 667
50045 Sep 22 23:22:27.215 INFO Completion from [2] id:667 status:true
50046 Sep 22 23:22:27.215 INFO [668/752] Repair commands completed
50047 Sep 22 23:22:27.215 INFO Pop front: ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50048 Sep 22 23:22:27.215 INFO Sent repair work, now wait for resp
50049 Sep 22 23:22:27.215 INFO [0] received reconcile message
50050 Sep 22 23:22:27.215 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50051 Sep 22 23:22:27.215 INFO [0] client ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50052 Sep 22 23:22:27.215 INFO [1] received reconcile message
50053 Sep 22 23:22:27.215 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50054 Sep 22 23:22:27.215 INFO [1] client ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50055 Sep 22 23:22:27.215 INFO [2] received reconcile message
50056 Sep 22 23:22:27.215 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50057 Sep 22 23:22:27.215 INFO [2] client ExtentFlush { repair_id: ReconciliationId(668), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50058 Sep 22 23:22:27.215 DEBG 668 Flush extent 51 with f:2 g:2
50059 Sep 22 23:22:27.215 DEBG Flush just extent 51 with f:2 and g:2
50060 Sep 22 23:22:27.215 DEBG [1] It's time to notify for 668
50061 Sep 22 23:22:27.215 INFO Completion from [1] id:668 status:true
50062 Sep 22 23:22:27.215 INFO [669/752] Repair commands completed
50063 Sep 22 23:22:27.215 INFO Pop front: ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }, state: ClientData([New, New, New]) }
50064 Sep 22 23:22:27.215 INFO Sent repair work, now wait for resp
50065 Sep 22 23:22:27.215 INFO [0] received reconcile message
50066 Sep 22 23:22:27.215 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }, state: ClientData([InProgress, New, New]) }, : downstairs
50067 Sep 22 23:22:27.215 INFO [0] client ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }
50068 Sep 22 23:22:27.215 INFO [1] received reconcile message
50069 Sep 22 23:22:27.215 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50070 Sep 22 23:22:27.215 INFO [1] client ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }
50071 Sep 22 23:22:27.216 INFO [2] received reconcile message
50072 Sep 22 23:22:27.216 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50073 Sep 22 23:22:27.216 INFO [2] client ExtentClose { repair_id: ReconciliationId(669), extent_id: 51 }
50074 Sep 22 23:22:27.216 DEBG 669 Close extent 51
50075 Sep 22 23:22:27.216 DEBG 669 Close extent 51
50076 Sep 22 23:22:27.216 DEBG 669 Close extent 51
50077 Sep 22 23:22:27.217 DEBG [2] It's time to notify for 669
50078 Sep 22 23:22:27.217 INFO Completion from [2] id:669 status:true
50079 Sep 22 23:22:27.217 INFO [670/752] Repair commands completed
50080 Sep 22 23:22:27.217 INFO Pop front: ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50081 Sep 22 23:22:27.217 INFO Sent repair work, now wait for resp
50082 Sep 22 23:22:27.217 INFO [0] received reconcile message
50083 Sep 22 23:22:27.217 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50084 Sep 22 23:22:27.217 INFO [0] client ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50085 Sep 22 23:22:27.217 INFO [0] Sending repair request ReconciliationId(670)
50086 Sep 22 23:22:27.217 INFO [1] received reconcile message
50087 Sep 22 23:22:27.217 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50088 Sep 22 23:22:27.217 INFO [1] client ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50089 Sep 22 23:22:27.217 INFO [1] No action required ReconciliationId(670)
50090 Sep 22 23:22:27.217 INFO [2] received reconcile message
50091 Sep 22 23:22:27.217 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50092 Sep 22 23:22:27.217 INFO [2] client ExtentRepair { repair_id: ReconciliationId(670), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50093 Sep 22 23:22:27.217 INFO [2] No action required ReconciliationId(670)
50094 Sep 22 23:22:27.217 DEBG 670 Repair extent 51 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50095 Sep 22 23:22:27.217 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/033.copy"
50096 Sep 22 23:22:27.283 INFO accepted connection, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50097 Sep 22 23:22:27.283 TRCE incoming request, uri: /extent/51/files, method: GET, req_id: 3559e9f7-d34a-4c0d-af86-4d20e1cd1f89, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50098 Sep 22 23:22:27.283 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/51/files, method: GET, req_id: 3559e9f7-d34a-4c0d-af86-4d20e1cd1f89, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50099 Sep 22 23:22:27.283 INFO eid:51 Found repair files: ["033", "033.db"]
50100 Sep 22 23:22:27.284 TRCE incoming request, uri: /newextent/51/data, method: GET, req_id: fe390388-1975-4ea7-99f4-bad030ca68d2, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50101 Sep 22 23:22:27.284 INFO request completed, latency_us: 260, response_code: 200, uri: /newextent/51/data, method: GET, req_id: fe390388-1975-4ea7-99f4-bad030ca68d2, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50102 Sep 22 23:22:27.289 TRCE incoming request, uri: /newextent/51/db, method: GET, req_id: 9f2bb5d5-f947-4c96-b29e-f0af846d5b60, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50103 Sep 22 23:22:27.289 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/51/db, method: GET, req_id: 9f2bb5d5-f947-4c96-b29e-f0af846d5b60, remote_addr: 127.0.0.1:59452, local_addr: 127.0.0.1:52864, task: repair
50104 Sep 22 23:22:27.290 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/033.copy" to "/tmp/downstairs-zrMnlo6G/00/000/033.replace"
50105 Sep 22 23:22:27.290 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50106 Sep 22 23:22:27.291 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/033.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50107 Sep 22 23:22:27.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/033"
50108 Sep 22 23:22:27.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/033.db"
50109 Sep 22 23:22:27.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50110 Sep 22 23:22:27.291 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/033.replace" to "/tmp/downstairs-zrMnlo6G/00/000/033.completed"
50111 Sep 22 23:22:27.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50112 Sep 22 23:22:27.291 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50113 Sep 22 23:22:27.291 DEBG [0] It's time to notify for 670
50114 Sep 22 23:22:27.292 INFO Completion from [0] id:670 status:true
50115 Sep 22 23:22:27.292 INFO [671/752] Repair commands completed
50116 Sep 22 23:22:27.292 INFO Pop front: ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }, state: ClientData([New, New, New]) }
50117 Sep 22 23:22:27.292 INFO Sent repair work, now wait for resp
50118 Sep 22 23:22:27.292 INFO [0] received reconcile message
50119 Sep 22 23:22:27.292 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }, state: ClientData([InProgress, New, New]) }, : downstairs
50120 Sep 22 23:22:27.292 INFO [0] client ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }
50121 Sep 22 23:22:27.292 INFO [1] received reconcile message
50122 Sep 22 23:22:27.292 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50123 Sep 22 23:22:27.292 INFO [1] client ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }
50124 Sep 22 23:22:27.292 INFO [2] received reconcile message
50125 Sep 22 23:22:27.292 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50126 Sep 22 23:22:27.292 INFO [2] client ExtentReopen { repair_id: ReconciliationId(671), extent_id: 51 }
50127 Sep 22 23:22:27.292 DEBG 671 Reopen extent 51
50128 Sep 22 23:22:27.293 DEBG 671 Reopen extent 51
50129 Sep 22 23:22:27.293 DEBG 671 Reopen extent 51
50130 Sep 22 23:22:27.294 DEBG [2] It's time to notify for 671
50131 Sep 22 23:22:27.294 INFO Completion from [2] id:671 status:true
50132 Sep 22 23:22:27.294 INFO [672/752] Repair commands completed
50133 Sep 22 23:22:27.294 INFO Pop front: ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50134 Sep 22 23:22:27.294 INFO Sent repair work, now wait for resp
50135 Sep 22 23:22:27.294 INFO [0] received reconcile message
50136 Sep 22 23:22:27.294 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50137 Sep 22 23:22:27.294 INFO [0] client ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50138 Sep 22 23:22:27.294 INFO [1] received reconcile message
50139 Sep 22 23:22:27.294 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50140 Sep 22 23:22:27.294 INFO [1] client ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50141 Sep 22 23:22:27.294 INFO [2] received reconcile message
50142 Sep 22 23:22:27.294 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50143 Sep 22 23:22:27.294 INFO [2] client ExtentFlush { repair_id: ReconciliationId(672), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50144 Sep 22 23:22:27.294 DEBG 672 Flush extent 33 with f:2 g:2
50145 Sep 22 23:22:27.294 DEBG Flush just extent 33 with f:2 and g:2
50146 Sep 22 23:22:27.294 DEBG [1] It's time to notify for 672
50147 Sep 22 23:22:27.294 INFO Completion from [1] id:672 status:true
50148 Sep 22 23:22:27.294 INFO [673/752] Repair commands completed
50149 Sep 22 23:22:27.294 INFO Pop front: ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }, state: ClientData([New, New, New]) }
50150 Sep 22 23:22:27.294 INFO Sent repair work, now wait for resp
50151 Sep 22 23:22:27.294 INFO [0] received reconcile message
50152 Sep 22 23:22:27.295 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }, state: ClientData([InProgress, New, New]) }, : downstairs
50153 Sep 22 23:22:27.295 INFO [0] client ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }
50154 Sep 22 23:22:27.295 INFO [1] received reconcile message
50155 Sep 22 23:22:27.295 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50156 Sep 22 23:22:27.295 INFO [1] client ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }
50157 Sep 22 23:22:27.295 INFO [2] received reconcile message
50158 Sep 22 23:22:27.295 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50159 Sep 22 23:22:27.295 INFO [2] client ExtentClose { repair_id: ReconciliationId(673), extent_id: 33 }
50160 Sep 22 23:22:27.295 DEBG 673 Close extent 33
50161 Sep 22 23:22:27.295 DEBG 673 Close extent 33
50162 Sep 22 23:22:27.295 DEBG 673 Close extent 33
50163 Sep 22 23:22:27.296 DEBG [2] It's time to notify for 673
50164 Sep 22 23:22:27.296 INFO Completion from [2] id:673 status:true
50165 Sep 22 23:22:27.296 INFO [674/752] Repair commands completed
50166 Sep 22 23:22:27.296 INFO Pop front: ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50167 Sep 22 23:22:27.296 INFO Sent repair work, now wait for resp
50168 Sep 22 23:22:27.296 INFO [0] received reconcile message
50169 Sep 22 23:22:27.296 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50170 Sep 22 23:22:27.296 INFO [0] client ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50171 Sep 22 23:22:27.296 INFO [0] Sending repair request ReconciliationId(674)
50172 Sep 22 23:22:27.296 INFO [1] received reconcile message
50173 Sep 22 23:22:27.296 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50174 Sep 22 23:22:27.296 INFO [1] client ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50175 Sep 22 23:22:27.296 INFO [1] No action required ReconciliationId(674)
50176 Sep 22 23:22:27.296 INFO [2] received reconcile message
50177 Sep 22 23:22:27.296 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50178 Sep 22 23:22:27.296 INFO [2] client ExtentRepair { repair_id: ReconciliationId(674), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50179 Sep 22 23:22:27.296 INFO [2] No action required ReconciliationId(674)
50180 Sep 22 23:22:27.296 DEBG 674 Repair extent 33 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50181 Sep 22 23:22:27.296 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/021.copy"
50182 Sep 22 23:22:27.331 DEBG [rc] retire 1090 clears [JobId(1089), JobId(1090)], : downstairs
50183 Sep 22 23:22:27.331 DEBG IO Flush 1094 has deps [JobId(1093), JobId(1092)]
50184 Sep 22 23:22:27.331 DEBG Flush :1092 extent_limit None deps:[JobId(1091), JobId(1090)] res:true f:35 g:1
50185 Sep 22 23:22:27.331 WARN returning error on read!
50186 Sep 22 23:22:27.331 DEBG Read :1093 deps:[JobId(1092)] res:false
50187 Sep 22 23:22:27.331 INFO [lossy] skipping 1093
50188 Sep 22 23:22:27.337 DEBG Read :1093 deps:[JobId(1092)] res:true
50189 Sep 22 23:22:27.359 ERRO [0] job id 1093 saw error GenericError("test error")
50190 Sep 22 23:22:27.361 INFO accepted connection, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50191 Sep 22 23:22:27.361 DEBG Flush :1094 extent_limit None deps:[JobId(1093), JobId(1092)] res:true f:36 g:1
50192 Sep 22 23:22:27.361 INFO [lossy] sleeping 1 second
50193 Sep 22 23:22:27.361 TRCE incoming request, uri: /extent/33/files, method: GET, req_id: 5d4e7ced-e28b-42e5-9cea-2717ee73cfc2, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50194 Sep 22 23:22:27.362 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/33/files, method: GET, req_id: 5d4e7ced-e28b-42e5-9cea-2717ee73cfc2, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50195 Sep 22 23:22:27.362 INFO eid:33 Found repair files: ["021", "021.db"]
50196 Sep 22 23:22:27.362 TRCE incoming request, uri: /newextent/33/data, method: GET, req_id: 8ce52ddf-8d30-4839-9a02-90455e86700f, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50197 Sep 22 23:22:27.362 INFO request completed, latency_us: 316, response_code: 200, uri: /newextent/33/data, method: GET, req_id: 8ce52ddf-8d30-4839-9a02-90455e86700f, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50198 Sep 22 23:22:27.363 DEBG Flush :1092 extent_limit None deps:[JobId(1091), JobId(1090)] res:true f:35 g:1
50199 Sep 22 23:22:27.367 TRCE incoming request, uri: /newextent/33/db, method: GET, req_id: 501ab7ce-34e9-4ea3-b6cd-4ff218682319, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50200 Sep 22 23:22:27.367 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/33/db, method: GET, req_id: 501ab7ce-34e9-4ea3-b6cd-4ff218682319, remote_addr: 127.0.0.1:59101, local_addr: 127.0.0.1:52864, task: repair
50201 Sep 22 23:22:27.369 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/021.copy" to "/tmp/downstairs-zrMnlo6G/00/000/021.replace"
50202 Sep 22 23:22:27.369 DEBG Read :1093 deps:[JobId(1092)] res:true
50203 Sep 22 23:22:27.369 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50204 Sep 22 23:22:27.369 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/021.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50205 Sep 22 23:22:27.370 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/021"
50206 Sep 22 23:22:27.370 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/021.db"
50207 Sep 22 23:22:27.370 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50208 Sep 22 23:22:27.370 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/021.replace" to "/tmp/downstairs-zrMnlo6G/00/000/021.completed"
50209 Sep 22 23:22:27.370 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50210 Sep 22 23:22:27.370 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50211 Sep 22 23:22:27.370 DEBG [0] It's time to notify for 674
50212 Sep 22 23:22:27.370 INFO Completion from [0] id:674 status:true
50213 Sep 22 23:22:27.370 INFO [675/752] Repair commands completed
50214 Sep 22 23:22:27.370 INFO Pop front: ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }, state: ClientData([New, New, New]) }
50215 Sep 22 23:22:27.370 INFO Sent repair work, now wait for resp
50216 Sep 22 23:22:27.370 INFO [0] received reconcile message
50217 Sep 22 23:22:27.370 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }, state: ClientData([InProgress, New, New]) }, : downstairs
50218 Sep 22 23:22:27.370 INFO [0] client ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }
50219 Sep 22 23:22:27.370 INFO [1] received reconcile message
50220 Sep 22 23:22:27.371 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50221 Sep 22 23:22:27.371 INFO [1] client ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }
50222 Sep 22 23:22:27.371 INFO [2] received reconcile message
50223 Sep 22 23:22:27.371 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50224 Sep 22 23:22:27.371 INFO [2] client ExtentReopen { repair_id: ReconciliationId(675), extent_id: 33 }
50225 Sep 22 23:22:27.371 DEBG 675 Reopen extent 33
50226 Sep 22 23:22:27.371 DEBG 675 Reopen extent 33
50227 Sep 22 23:22:27.372 DEBG 675 Reopen extent 33
50228 Sep 22 23:22:27.372 DEBG [2] It's time to notify for 675
50229 Sep 22 23:22:27.373 INFO Completion from [2] id:675 status:true
50230 Sep 22 23:22:27.373 INFO [676/752] Repair commands completed
50231 Sep 22 23:22:27.373 INFO Pop front: ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50232 Sep 22 23:22:27.373 INFO Sent repair work, now wait for resp
50233 Sep 22 23:22:27.373 INFO [0] received reconcile message
50234 Sep 22 23:22:27.373 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50235 Sep 22 23:22:27.373 INFO [0] client ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50236 Sep 22 23:22:27.373 INFO [1] received reconcile message
50237 Sep 22 23:22:27.373 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50238 Sep 22 23:22:27.373 INFO [1] client ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50239 Sep 22 23:22:27.373 INFO [2] received reconcile message
50240 Sep 22 23:22:27.373 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50241 Sep 22 23:22:27.373 INFO [2] client ExtentFlush { repair_id: ReconciliationId(676), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50242 Sep 22 23:22:27.373 DEBG 676 Flush extent 54 with f:2 g:2
50243 Sep 22 23:22:27.373 DEBG Flush just extent 54 with f:2 and g:2
50244 Sep 22 23:22:27.373 DEBG [1] It's time to notify for 676
50245 Sep 22 23:22:27.373 INFO Completion from [1] id:676 status:true
50246 Sep 22 23:22:27.373 INFO [677/752] Repair commands completed
50247 Sep 22 23:22:27.373 INFO Pop front: ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }, state: ClientData([New, New, New]) }
50248 Sep 22 23:22:27.373 INFO Sent repair work, now wait for resp
50249 Sep 22 23:22:27.373 INFO [0] received reconcile message
50250 Sep 22 23:22:27.373 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }, state: ClientData([InProgress, New, New]) }, : downstairs
50251 Sep 22 23:22:27.373 INFO [0] client ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }
50252 Sep 22 23:22:27.373 INFO [1] received reconcile message
50253 Sep 22 23:22:27.373 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50254 Sep 22 23:22:27.373 INFO [1] client ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }
50255 Sep 22 23:22:27.373 INFO [2] received reconcile message
50256 Sep 22 23:22:27.373 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50257 Sep 22 23:22:27.373 INFO [2] client ExtentClose { repair_id: ReconciliationId(677), extent_id: 54 }
50258 Sep 22 23:22:27.374 DEBG 677 Close extent 54
50259 Sep 22 23:22:27.374 DEBG 677 Close extent 54
50260 Sep 22 23:22:27.374 DEBG 677 Close extent 54
50261 Sep 22 23:22:27.374 DEBG [2] It's time to notify for 677
50262 Sep 22 23:22:27.375 INFO Completion from [2] id:677 status:true
50263 Sep 22 23:22:27.375 INFO [678/752] Repair commands completed
50264 Sep 22 23:22:27.375 INFO Pop front: ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50265 Sep 22 23:22:27.375 INFO Sent repair work, now wait for resp
50266 Sep 22 23:22:27.375 INFO [0] received reconcile message
50267 Sep 22 23:22:27.375 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50268 Sep 22 23:22:27.375 INFO [0] client ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50269 Sep 22 23:22:27.375 INFO [0] Sending repair request ReconciliationId(678)
50270 Sep 22 23:22:27.375 INFO [1] received reconcile message
50271 Sep 22 23:22:27.375 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50272 Sep 22 23:22:27.375 INFO [1] client ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50273 Sep 22 23:22:27.375 INFO [1] No action required ReconciliationId(678)
50274 Sep 22 23:22:27.375 INFO [2] received reconcile message
50275 Sep 22 23:22:27.375 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50276 Sep 22 23:22:27.375 INFO [2] client ExtentRepair { repair_id: ReconciliationId(678), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50277 Sep 22 23:22:27.375 INFO [2] No action required ReconciliationId(678)
50278 Sep 22 23:22:27.375 DEBG 678 Repair extent 54 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50279 Sep 22 23:22:27.375 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/036.copy"
50280 Sep 22 23:22:27.438 INFO accepted connection, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50281 Sep 22 23:22:27.438 TRCE incoming request, uri: /extent/54/files, method: GET, req_id: 044f231c-1ff5-452e-bc67-2009ae647edb, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50282 Sep 22 23:22:27.438 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/54/files, method: GET, req_id: 044f231c-1ff5-452e-bc67-2009ae647edb, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50283 Sep 22 23:22:27.438 INFO eid:54 Found repair files: ["036", "036.db"]
50284 Sep 22 23:22:27.439 TRCE incoming request, uri: /newextent/54/data, method: GET, req_id: 62311f09-c851-48fd-9d5e-02a587aec6c1, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50285 Sep 22 23:22:27.439 INFO request completed, latency_us: 260, response_code: 200, uri: /newextent/54/data, method: GET, req_id: 62311f09-c851-48fd-9d5e-02a587aec6c1, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50286 Sep 22 23:22:27.444 TRCE incoming request, uri: /newextent/54/db, method: GET, req_id: 9e0d4a93-7ac7-4def-8ca7-385b316decf2, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50287 Sep 22 23:22:27.444 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/54/db, method: GET, req_id: 9e0d4a93-7ac7-4def-8ca7-385b316decf2, remote_addr: 127.0.0.1:59164, local_addr: 127.0.0.1:52864, task: repair
50288 Sep 22 23:22:27.445 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/036.copy" to "/tmp/downstairs-zrMnlo6G/00/000/036.replace"
50289 Sep 22 23:22:27.445 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50290 Sep 22 23:22:27.446 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/036.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50291 Sep 22 23:22:27.446 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/036"
50292 Sep 22 23:22:27.446 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/036.db"
50293 Sep 22 23:22:27.446 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50294 Sep 22 23:22:27.446 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/036.replace" to "/tmp/downstairs-zrMnlo6G/00/000/036.completed"
50295 Sep 22 23:22:27.446 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50296 Sep 22 23:22:27.446 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50297 Sep 22 23:22:27.447 DEBG [0] It's time to notify for 678
50298 Sep 22 23:22:27.447 INFO Completion from [0] id:678 status:true
50299 Sep 22 23:22:27.447 INFO [679/752] Repair commands completed
50300 Sep 22 23:22:27.447 INFO Pop front: ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }, state: ClientData([New, New, New]) }
50301 Sep 22 23:22:27.447 INFO Sent repair work, now wait for resp
50302 Sep 22 23:22:27.447 INFO [0] received reconcile message
50303 Sep 22 23:22:27.447 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }, state: ClientData([InProgress, New, New]) }, : downstairs
50304 Sep 22 23:22:27.447 INFO [0] client ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }
50305 Sep 22 23:22:27.447 INFO [1] received reconcile message
50306 Sep 22 23:22:27.447 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50307 Sep 22 23:22:27.447 INFO [1] client ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }
50308 Sep 22 23:22:27.447 INFO [2] received reconcile message
50309 Sep 22 23:22:27.447 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50310 Sep 22 23:22:27.447 INFO [2] client ExtentReopen { repair_id: ReconciliationId(679), extent_id: 54 }
50311 Sep 22 23:22:27.447 DEBG 679 Reopen extent 54
50312 Sep 22 23:22:27.448 DEBG 679 Reopen extent 54
50313 Sep 22 23:22:27.448 DEBG 679 Reopen extent 54
50314 Sep 22 23:22:27.449 DEBG [2] It's time to notify for 679
50315 Sep 22 23:22:27.449 INFO Completion from [2] id:679 status:true
50316 Sep 22 23:22:27.449 INFO [680/752] Repair commands completed
50317 Sep 22 23:22:27.449 INFO Pop front: ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50318 Sep 22 23:22:27.449 INFO Sent repair work, now wait for resp
50319 Sep 22 23:22:27.449 INFO [0] received reconcile message
50320 Sep 22 23:22:27.449 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50321 Sep 22 23:22:27.449 INFO [0] client ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50322 Sep 22 23:22:27.449 INFO [1] received reconcile message
50323 Sep 22 23:22:27.449 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50324 Sep 22 23:22:27.449 INFO [1] client ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50325 Sep 22 23:22:27.449 INFO [2] received reconcile message
50326 Sep 22 23:22:27.449 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50327 Sep 22 23:22:27.449 INFO [2] client ExtentFlush { repair_id: ReconciliationId(680), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50328 Sep 22 23:22:27.449 DEBG 680 Flush extent 112 with f:2 g:2
50329 Sep 22 23:22:27.449 DEBG Flush just extent 112 with f:2 and g:2
50330 Sep 22 23:22:27.449 DEBG [1] It's time to notify for 680
50331 Sep 22 23:22:27.449 INFO Completion from [1] id:680 status:true
50332 Sep 22 23:22:27.449 INFO [681/752] Repair commands completed
50333 Sep 22 23:22:27.449 INFO Pop front: ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }, state: ClientData([New, New, New]) }
50334 Sep 22 23:22:27.449 INFO Sent repair work, now wait for resp
50335 Sep 22 23:22:27.450 INFO [0] received reconcile message
50336 Sep 22 23:22:27.450 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }, state: ClientData([InProgress, New, New]) }, : downstairs
50337 Sep 22 23:22:27.450 INFO [0] client ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }
50338 Sep 22 23:22:27.450 INFO [1] received reconcile message
50339 Sep 22 23:22:27.450 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50340 Sep 22 23:22:27.450 INFO [1] client ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }
50341 Sep 22 23:22:27.450 INFO [2] received reconcile message
50342 Sep 22 23:22:27.450 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50343 Sep 22 23:22:27.450 INFO [2] client ExtentClose { repair_id: ReconciliationId(681), extent_id: 112 }
50344 Sep 22 23:22:27.450 DEBG 681 Close extent 112
50345 Sep 22 23:22:27.450 DEBG 681 Close extent 112
50346 Sep 22 23:22:27.450 DEBG 681 Close extent 112
50347 Sep 22 23:22:27.451 DEBG [2] It's time to notify for 681
50348 Sep 22 23:22:27.451 INFO Completion from [2] id:681 status:true
50349 Sep 22 23:22:27.451 INFO [682/752] Repair commands completed
50350 Sep 22 23:22:27.451 INFO Pop front: ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50351 Sep 22 23:22:27.451 INFO Sent repair work, now wait for resp
50352 Sep 22 23:22:27.451 INFO [0] received reconcile message
50353 Sep 22 23:22:27.451 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50354 Sep 22 23:22:27.451 INFO [0] client ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50355 Sep 22 23:22:27.451 INFO [0] Sending repair request ReconciliationId(682)
50356 Sep 22 23:22:27.451 INFO [1] received reconcile message
50357 Sep 22 23:22:27.451 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50358 Sep 22 23:22:27.451 INFO [1] client ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50359 Sep 22 23:22:27.451 INFO [1] No action required ReconciliationId(682)
50360 Sep 22 23:22:27.451 INFO [2] received reconcile message
50361 Sep 22 23:22:27.451 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50362 Sep 22 23:22:27.451 INFO [2] client ExtentRepair { repair_id: ReconciliationId(682), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50363 Sep 22 23:22:27.451 INFO [2] No action required ReconciliationId(682)
50364 Sep 22 23:22:27.451 DEBG 682 Repair extent 112 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50365 Sep 22 23:22:27.451 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/070.copy"
50366 Sep 22 23:22:27.512 INFO accepted connection, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50367 Sep 22 23:22:27.513 TRCE incoming request, uri: /extent/112/files, method: GET, req_id: bea0ec14-1bd7-4c55-97c3-1fc28b67600d, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50368 Sep 22 23:22:27.513 INFO request completed, latency_us: 207, response_code: 200, uri: /extent/112/files, method: GET, req_id: bea0ec14-1bd7-4c55-97c3-1fc28b67600d, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50369 Sep 22 23:22:27.513 INFO eid:112 Found repair files: ["070", "070.db"]
50370 Sep 22 23:22:27.513 TRCE incoming request, uri: /newextent/112/data, method: GET, req_id: 8959e550-1dd6-4477-a20c-d59d90a0ca15, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50371 Sep 22 23:22:27.514 INFO request completed, latency_us: 311, response_code: 200, uri: /newextent/112/data, method: GET, req_id: 8959e550-1dd6-4477-a20c-d59d90a0ca15, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50372 Sep 22 23:22:27.519 TRCE incoming request, uri: /newextent/112/db, method: GET, req_id: fceb4789-baef-44ce-bc33-30f7b568d86c, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50373 Sep 22 23:22:27.519 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/112/db, method: GET, req_id: fceb4789-baef-44ce-bc33-30f7b568d86c, remote_addr: 127.0.0.1:55071, local_addr: 127.0.0.1:52864, task: repair
50374 Sep 22 23:22:27.520 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/070.copy" to "/tmp/downstairs-zrMnlo6G/00/000/070.replace"
50375 Sep 22 23:22:27.520 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50376 Sep 22 23:22:27.521 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/070.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50377 Sep 22 23:22:27.521 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/070"
50378 Sep 22 23:22:27.521 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/070.db"
50379 Sep 22 23:22:27.521 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50380 Sep 22 23:22:27.521 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/070.replace" to "/tmp/downstairs-zrMnlo6G/00/000/070.completed"
50381 Sep 22 23:22:27.521 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50382 Sep 22 23:22:27.521 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50383 Sep 22 23:22:27.522 DEBG [0] It's time to notify for 682
50384 Sep 22 23:22:27.522 INFO Completion from [0] id:682 status:true
50385 Sep 22 23:22:27.522 INFO [683/752] Repair commands completed
50386 Sep 22 23:22:27.522 INFO Pop front: ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }, state: ClientData([New, New, New]) }
50387 Sep 22 23:22:27.522 INFO Sent repair work, now wait for resp
50388 Sep 22 23:22:27.522 INFO [0] received reconcile message
50389 Sep 22 23:22:27.522 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }, state: ClientData([InProgress, New, New]) }, : downstairs
50390 Sep 22 23:22:27.522 INFO [0] client ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }
50391 Sep 22 23:22:27.522 INFO [1] received reconcile message
50392 Sep 22 23:22:27.522 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50393 Sep 22 23:22:27.522 INFO [1] client ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }
50394 Sep 22 23:22:27.522 INFO [2] received reconcile message
50395 Sep 22 23:22:27.522 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50396 Sep 22 23:22:27.522 INFO [2] client ExtentReopen { repair_id: ReconciliationId(683), extent_id: 112 }
50397 Sep 22 23:22:27.522 DEBG 683 Reopen extent 112
50398 Sep 22 23:22:27.523 DEBG 683 Reopen extent 112
50399 Sep 22 23:22:27.523 DEBG 683 Reopen extent 112
50400 Sep 22 23:22:27.524 DEBG [2] It's time to notify for 683
50401 Sep 22 23:22:27.524 INFO Completion from [2] id:683 status:true
50402 Sep 22 23:22:27.524 INFO [684/752] Repair commands completed
50403 Sep 22 23:22:27.524 INFO Pop front: ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50404 Sep 22 23:22:27.524 INFO Sent repair work, now wait for resp
50405 Sep 22 23:22:27.524 INFO [0] received reconcile message
50406 Sep 22 23:22:27.524 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50407 Sep 22 23:22:27.524 INFO [0] client ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50408 Sep 22 23:22:27.524 INFO [1] received reconcile message
50409 Sep 22 23:22:27.524 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50410 Sep 22 23:22:27.524 INFO [1] client ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50411 Sep 22 23:22:27.524 INFO [2] received reconcile message
50412 Sep 22 23:22:27.524 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50413 Sep 22 23:22:27.524 INFO [2] client ExtentFlush { repair_id: ReconciliationId(684), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50414 Sep 22 23:22:27.524 DEBG 684 Flush extent 143 with f:2 g:2
50415 Sep 22 23:22:27.524 DEBG Flush just extent 143 with f:2 and g:2
50416 Sep 22 23:22:27.525 DEBG [1] It's time to notify for 684
50417 Sep 22 23:22:27.525 INFO Completion from [1] id:684 status:true
50418 Sep 22 23:22:27.525 INFO [685/752] Repair commands completed
50419 Sep 22 23:22:27.525 INFO Pop front: ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }, state: ClientData([New, New, New]) }
50420 Sep 22 23:22:27.525 INFO Sent repair work, now wait for resp
50421 Sep 22 23:22:27.525 INFO [0] received reconcile message
50422 Sep 22 23:22:27.525 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }, state: ClientData([InProgress, New, New]) }, : downstairs
50423 Sep 22 23:22:27.525 INFO [0] client ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }
50424 Sep 22 23:22:27.525 INFO [1] received reconcile message
50425 Sep 22 23:22:27.525 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50426 Sep 22 23:22:27.525 INFO [1] client ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }
50427 Sep 22 23:22:27.525 INFO [2] received reconcile message
50428 Sep 22 23:22:27.525 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50429 Sep 22 23:22:27.525 INFO [2] client ExtentClose { repair_id: ReconciliationId(685), extent_id: 143 }
50430 Sep 22 23:22:27.525 DEBG 685 Close extent 143
50431 Sep 22 23:22:27.525 DEBG 685 Close extent 143
50432 Sep 22 23:22:27.526 DEBG 685 Close extent 143
50433 Sep 22 23:22:27.526 DEBG [2] It's time to notify for 685
50434 Sep 22 23:22:27.526 INFO Completion from [2] id:685 status:true
50435 Sep 22 23:22:27.526 INFO [686/752] Repair commands completed
50436 Sep 22 23:22:27.526 INFO Pop front: ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50437 Sep 22 23:22:27.526 INFO Sent repair work, now wait for resp
50438 Sep 22 23:22:27.526 INFO [0] received reconcile message
50439 Sep 22 23:22:27.526 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50440 Sep 22 23:22:27.526 INFO [0] client ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50441 Sep 22 23:22:27.526 INFO [0] Sending repair request ReconciliationId(686)
50442 Sep 22 23:22:27.526 INFO [1] received reconcile message
50443 Sep 22 23:22:27.526 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50444 Sep 22 23:22:27.526 INFO [1] client ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50445 Sep 22 23:22:27.526 INFO [1] No action required ReconciliationId(686)
50446 Sep 22 23:22:27.526 INFO [2] received reconcile message
50447 Sep 22 23:22:27.526 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50448 Sep 22 23:22:27.526 INFO [2] client ExtentRepair { repair_id: ReconciliationId(686), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50449 Sep 22 23:22:27.526 INFO [2] No action required ReconciliationId(686)
50450 Sep 22 23:22:27.526 DEBG 686 Repair extent 143 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50451 Sep 22 23:22:27.527 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/08F.copy"
50452 Sep 22 23:22:27.589 INFO accepted connection, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50453 Sep 22 23:22:27.589 TRCE incoming request, uri: /extent/143/files, method: GET, req_id: ac8d7628-8f70-414a-a2a9-cc729a1ede28, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50454 Sep 22 23:22:27.589 INFO request completed, latency_us: 237, response_code: 200, uri: /extent/143/files, method: GET, req_id: ac8d7628-8f70-414a-a2a9-cc729a1ede28, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50455 Sep 22 23:22:27.589 INFO eid:143 Found repair files: ["08F", "08F.db"]
50456 Sep 22 23:22:27.590 TRCE incoming request, uri: /newextent/143/data, method: GET, req_id: 8642b366-f0ad-4753-a50f-f2062d5f09fe, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50457 Sep 22 23:22:27.590 INFO request completed, latency_us: 269, response_code: 200, uri: /newextent/143/data, method: GET, req_id: 8642b366-f0ad-4753-a50f-f2062d5f09fe, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50458 Sep 22 23:22:27.595 TRCE incoming request, uri: /newextent/143/db, method: GET, req_id: 46b91a23-6f00-4a13-9872-56a46bd3113b, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50459 Sep 22 23:22:27.595 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/143/db, method: GET, req_id: 46b91a23-6f00-4a13-9872-56a46bd3113b, remote_addr: 127.0.0.1:33091, local_addr: 127.0.0.1:52864, task: repair
50460 Sep 22 23:22:27.596 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/08F.copy" to "/tmp/downstairs-zrMnlo6G/00/000/08F.replace"
50461 Sep 22 23:22:27.596 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50462 Sep 22 23:22:27.597 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/08F.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50463 Sep 22 23:22:27.597 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08F"
50464 Sep 22 23:22:27.598 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/08F.db"
50465 Sep 22 23:22:27.598 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50466 Sep 22 23:22:27.598 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/08F.replace" to "/tmp/downstairs-zrMnlo6G/00/000/08F.completed"
50467 Sep 22 23:22:27.598 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50468 Sep 22 23:22:27.598 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50469 Sep 22 23:22:27.598 DEBG [0] It's time to notify for 686
50470 Sep 22 23:22:27.598 INFO Completion from [0] id:686 status:true
50471 Sep 22 23:22:27.598 INFO [687/752] Repair commands completed
50472 Sep 22 23:22:27.598 INFO Pop front: ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }, state: ClientData([New, New, New]) }
50473 Sep 22 23:22:27.598 INFO Sent repair work, now wait for resp
50474 Sep 22 23:22:27.598 INFO [0] received reconcile message
50475 Sep 22 23:22:27.598 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }, state: ClientData([InProgress, New, New]) }, : downstairs
50476 Sep 22 23:22:27.598 INFO [0] client ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }
50477 Sep 22 23:22:27.598 INFO [1] received reconcile message
50478 Sep 22 23:22:27.598 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50479 Sep 22 23:22:27.598 INFO [1] client ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }
50480 Sep 22 23:22:27.598 INFO [2] received reconcile message
50481 Sep 22 23:22:27.598 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50482 Sep 22 23:22:27.598 INFO [2] client ExtentReopen { repair_id: ReconciliationId(687), extent_id: 143 }
50483 Sep 22 23:22:27.599 DEBG 687 Reopen extent 143
50484 Sep 22 23:22:27.599 DEBG 687 Reopen extent 143
50485 Sep 22 23:22:27.600 DEBG 687 Reopen extent 143
50486 Sep 22 23:22:27.600 DEBG [2] It's time to notify for 687
50487 Sep 22 23:22:27.600 INFO Completion from [2] id:687 status:true
50488 Sep 22 23:22:27.600 INFO [688/752] Repair commands completed
50489 Sep 22 23:22:27.600 INFO Pop front: ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50490 Sep 22 23:22:27.600 INFO Sent repair work, now wait for resp
50491 Sep 22 23:22:27.601 INFO [0] received reconcile message
50492 Sep 22 23:22:27.601 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50493 Sep 22 23:22:27.601 INFO [0] client ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50494 Sep 22 23:22:27.601 INFO [1] received reconcile message
50495 Sep 22 23:22:27.601 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50496 Sep 22 23:22:27.601 INFO [1] client ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50497 Sep 22 23:22:27.601 INFO [2] received reconcile message
50498 Sep 22 23:22:27.601 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50499 Sep 22 23:22:27.601 INFO [2] client ExtentFlush { repair_id: ReconciliationId(688), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50500 Sep 22 23:22:27.601 DEBG 688 Flush extent 151 with f:2 g:2
50501 Sep 22 23:22:27.601 DEBG Flush just extent 151 with f:2 and g:2
50502 Sep 22 23:22:27.601 DEBG [1] It's time to notify for 688
50503 Sep 22 23:22:27.601 INFO Completion from [1] id:688 status:true
50504 Sep 22 23:22:27.601 INFO [689/752] Repair commands completed
50505 Sep 22 23:22:27.601 INFO Pop front: ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }, state: ClientData([New, New, New]) }
50506 Sep 22 23:22:27.601 INFO Sent repair work, now wait for resp
50507 Sep 22 23:22:27.601 INFO [0] received reconcile message
50508 Sep 22 23:22:27.601 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }, state: ClientData([InProgress, New, New]) }, : downstairs
50509 Sep 22 23:22:27.601 INFO [0] client ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }
50510 Sep 22 23:22:27.601 INFO [1] received reconcile message
50511 Sep 22 23:22:27.601 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50512 Sep 22 23:22:27.601 INFO [1] client ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }
50513 Sep 22 23:22:27.601 INFO [2] received reconcile message
50514 Sep 22 23:22:27.601 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50515 Sep 22 23:22:27.601 INFO [2] client ExtentClose { repair_id: ReconciliationId(689), extent_id: 151 }
50516 Sep 22 23:22:27.601 DEBG 689 Close extent 151
50517 Sep 22 23:22:27.602 DEBG 689 Close extent 151
50518 Sep 22 23:22:27.602 DEBG 689 Close extent 151
50519 Sep 22 23:22:27.602 DEBG [2] It's time to notify for 689
50520 Sep 22 23:22:27.602 INFO Completion from [2] id:689 status:true
50521 Sep 22 23:22:27.602 INFO [690/752] Repair commands completed
50522 Sep 22 23:22:27.603 INFO Pop front: ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50523 Sep 22 23:22:27.603 INFO Sent repair work, now wait for resp
50524 Sep 22 23:22:27.603 INFO [0] received reconcile message
50525 Sep 22 23:22:27.603 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50526 Sep 22 23:22:27.603 INFO [0] client ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50527 Sep 22 23:22:27.603 INFO [0] Sending repair request ReconciliationId(690)
50528 Sep 22 23:22:27.603 INFO [1] received reconcile message
50529 Sep 22 23:22:27.603 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50530 Sep 22 23:22:27.603 INFO [1] client ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50531 Sep 22 23:22:27.603 INFO [1] No action required ReconciliationId(690)
50532 Sep 22 23:22:27.603 INFO [2] received reconcile message
50533 Sep 22 23:22:27.603 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50534 Sep 22 23:22:27.603 INFO [2] client ExtentRepair { repair_id: ReconciliationId(690), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50535 Sep 22 23:22:27.603 INFO [2] No action required ReconciliationId(690)
50536 Sep 22 23:22:27.603 DEBG 690 Repair extent 151 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50537 Sep 22 23:22:27.603 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/097.copy"
50538 Sep 22 23:22:27.668 INFO accepted connection, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50539 Sep 22 23:22:27.668 TRCE incoming request, uri: /extent/151/files, method: GET, req_id: 66ce9c53-4f78-44a7-9f7a-f7f1d96ab2be, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50540 Sep 22 23:22:27.668 INFO request completed, latency_us: 237, response_code: 200, uri: /extent/151/files, method: GET, req_id: 66ce9c53-4f78-44a7-9f7a-f7f1d96ab2be, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50541 Sep 22 23:22:27.669 INFO eid:151 Found repair files: ["097", "097.db"]
50542 Sep 22 23:22:27.669 TRCE incoming request, uri: /newextent/151/data, method: GET, req_id: f3e71f3c-240d-4aef-8b48-1ad5aa208f0b, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50543 Sep 22 23:22:27.669 INFO request completed, latency_us: 332, response_code: 200, uri: /newextent/151/data, method: GET, req_id: f3e71f3c-240d-4aef-8b48-1ad5aa208f0b, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50544 Sep 22 23:22:27.674 TRCE incoming request, uri: /newextent/151/db, method: GET, req_id: 75655525-ad71-4198-8dac-cdd17c73537a, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50545 Sep 22 23:22:27.675 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/151/db, method: GET, req_id: 75655525-ad71-4198-8dac-cdd17c73537a, remote_addr: 127.0.0.1:63298, local_addr: 127.0.0.1:52864, task: repair
50546 Sep 22 23:22:27.676 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/097.copy" to "/tmp/downstairs-zrMnlo6G/00/000/097.replace"
50547 Sep 22 23:22:27.676 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50548 Sep 22 23:22:27.677 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/097.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50549 Sep 22 23:22:27.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/097"
50550 Sep 22 23:22:27.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/097.db"
50551 Sep 22 23:22:27.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50552 Sep 22 23:22:27.677 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/097.replace" to "/tmp/downstairs-zrMnlo6G/00/000/097.completed"
50553 Sep 22 23:22:27.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50554 Sep 22 23:22:27.677 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50555 Sep 22 23:22:27.677 DEBG [0] It's time to notify for 690
50556 Sep 22 23:22:27.678 INFO Completion from [0] id:690 status:true
50557 Sep 22 23:22:27.678 INFO [691/752] Repair commands completed
50558 Sep 22 23:22:27.678 INFO Pop front: ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }, state: ClientData([New, New, New]) }
50559 Sep 22 23:22:27.678 INFO Sent repair work, now wait for resp
50560 Sep 22 23:22:27.678 INFO [0] received reconcile message
50561 Sep 22 23:22:27.678 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }, state: ClientData([InProgress, New, New]) }, : downstairs
50562 Sep 22 23:22:27.678 INFO [0] client ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }
50563 Sep 22 23:22:27.678 INFO [1] received reconcile message
50564 Sep 22 23:22:27.678 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50565 Sep 22 23:22:27.678 INFO [1] client ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }
50566 Sep 22 23:22:27.678 INFO [2] received reconcile message
50567 Sep 22 23:22:27.678 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50568 Sep 22 23:22:27.678 INFO [2] client ExtentReopen { repair_id: ReconciliationId(691), extent_id: 151 }
50569 Sep 22 23:22:27.678 DEBG 691 Reopen extent 151
50570 Sep 22 23:22:27.679 DEBG 691 Reopen extent 151
50571 Sep 22 23:22:27.679 DEBG 691 Reopen extent 151
50572 Sep 22 23:22:27.680 DEBG [2] It's time to notify for 691
50573 Sep 22 23:22:27.680 INFO Completion from [2] id:691 status:true
50574 Sep 22 23:22:27.680 INFO [692/752] Repair commands completed
50575 Sep 22 23:22:27.680 INFO Pop front: ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50576 Sep 22 23:22:27.680 INFO Sent repair work, now wait for resp
50577 Sep 22 23:22:27.680 INFO [0] received reconcile message
50578 Sep 22 23:22:27.680 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50579 Sep 22 23:22:27.680 INFO [0] client ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50580 Sep 22 23:22:27.680 INFO [1] received reconcile message
50581 Sep 22 23:22:27.680 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50582 Sep 22 23:22:27.680 INFO [1] client ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50583 Sep 22 23:22:27.680 INFO [2] received reconcile message
50584 Sep 22 23:22:27.680 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50585 Sep 22 23:22:27.680 INFO [2] client ExtentFlush { repair_id: ReconciliationId(692), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50586 Sep 22 23:22:27.680 DEBG 692 Flush extent 101 with f:2 g:2
50587 Sep 22 23:22:27.680 DEBG Flush just extent 101 with f:2 and g:2
50588 Sep 22 23:22:27.680 DEBG [1] It's time to notify for 692
50589 Sep 22 23:22:27.681 INFO Completion from [1] id:692 status:true
50590 Sep 22 23:22:27.681 INFO [693/752] Repair commands completed
50591 Sep 22 23:22:27.681 INFO Pop front: ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }, state: ClientData([New, New, New]) }
50592 Sep 22 23:22:27.681 INFO Sent repair work, now wait for resp
50593 Sep 22 23:22:27.681 INFO [0] received reconcile message
50594 Sep 22 23:22:27.681 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }, state: ClientData([InProgress, New, New]) }, : downstairs
50595 Sep 22 23:22:27.681 INFO [0] client ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }
50596 Sep 22 23:22:27.681 INFO [1] received reconcile message
50597 Sep 22 23:22:27.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50598 Sep 22 23:22:27.681 INFO [1] client ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }
50599 Sep 22 23:22:27.681 INFO [2] received reconcile message
50600 Sep 22 23:22:27.681 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50601 Sep 22 23:22:27.681 INFO [2] client ExtentClose { repair_id: ReconciliationId(693), extent_id: 101 }
50602 Sep 22 23:22:27.681 DEBG 693 Close extent 101
50603 Sep 22 23:22:27.681 DEBG 693 Close extent 101
50604 Sep 22 23:22:27.682 DEBG 693 Close extent 101
50605 Sep 22 23:22:27.682 DEBG [2] It's time to notify for 693
50606 Sep 22 23:22:27.682 INFO Completion from [2] id:693 status:true
50607 Sep 22 23:22:27.682 INFO [694/752] Repair commands completed
50608 Sep 22 23:22:27.682 INFO Pop front: ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50609 Sep 22 23:22:27.682 INFO Sent repair work, now wait for resp
50610 Sep 22 23:22:27.682 INFO [0] received reconcile message
50611 Sep 22 23:22:27.682 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50612 Sep 22 23:22:27.682 INFO [0] client ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50613 Sep 22 23:22:27.682 INFO [0] Sending repair request ReconciliationId(694)
50614 Sep 22 23:22:27.682 INFO [1] received reconcile message
50615 Sep 22 23:22:27.682 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50616 Sep 22 23:22:27.682 INFO [1] client ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50617 Sep 22 23:22:27.682 INFO [1] No action required ReconciliationId(694)
50618 Sep 22 23:22:27.682 INFO [2] received reconcile message
50619 Sep 22 23:22:27.682 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50620 Sep 22 23:22:27.682 INFO [2] client ExtentRepair { repair_id: ReconciliationId(694), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50621 Sep 22 23:22:27.682 INFO [2] No action required ReconciliationId(694)
50622 Sep 22 23:22:27.682 DEBG 694 Repair extent 101 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50623 Sep 22 23:22:27.683 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/065.copy"
50624 Sep 22 23:22:27.747 INFO accepted connection, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50625 Sep 22 23:22:27.747 TRCE incoming request, uri: /extent/101/files, method: GET, req_id: 1b764a04-d2b2-4cdd-98f0-f5e24d54eb18, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50626 Sep 22 23:22:27.748 INFO request completed, latency_us: 230, response_code: 200, uri: /extent/101/files, method: GET, req_id: 1b764a04-d2b2-4cdd-98f0-f5e24d54eb18, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50627 Sep 22 23:22:27.748 INFO eid:101 Found repair files: ["065", "065.db"]
50628 Sep 22 23:22:27.748 TRCE incoming request, uri: /newextent/101/data, method: GET, req_id: 2705c68f-7de3-40d3-b657-db8f42ee9cb6, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50629 Sep 22 23:22:27.749 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/101/data, method: GET, req_id: 2705c68f-7de3-40d3-b657-db8f42ee9cb6, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50630 Sep 22 23:22:27.753 TRCE incoming request, uri: /newextent/101/db, method: GET, req_id: 7240f1e3-98f3-4626-95d6-68d21b65d73c, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50631 Sep 22 23:22:27.754 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/101/db, method: GET, req_id: 7240f1e3-98f3-4626-95d6-68d21b65d73c, remote_addr: 127.0.0.1:37015, local_addr: 127.0.0.1:52864, task: repair
50632 Sep 22 23:22:27.755 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/065.copy" to "/tmp/downstairs-zrMnlo6G/00/000/065.replace"
50633 Sep 22 23:22:27.755 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50634 Sep 22 23:22:27.756 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/065.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50635 Sep 22 23:22:27.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/065"
50636 Sep 22 23:22:27.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/065.db"
50637 Sep 22 23:22:27.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50638 Sep 22 23:22:27.756 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/065.replace" to "/tmp/downstairs-zrMnlo6G/00/000/065.completed"
50639 Sep 22 23:22:27.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50640 Sep 22 23:22:27.756 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50641 Sep 22 23:22:27.756 DEBG [0] It's time to notify for 694
50642 Sep 22 23:22:27.757 INFO Completion from [0] id:694 status:true
50643 Sep 22 23:22:27.757 INFO [695/752] Repair commands completed
50644 Sep 22 23:22:27.757 INFO Pop front: ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }, state: ClientData([New, New, New]) }
50645 Sep 22 23:22:27.757 INFO Sent repair work, now wait for resp
50646 Sep 22 23:22:27.757 INFO [0] received reconcile message
50647 Sep 22 23:22:27.757 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }, state: ClientData([InProgress, New, New]) }, : downstairs
50648 Sep 22 23:22:27.757 INFO [0] client ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }
50649 Sep 22 23:22:27.757 INFO [1] received reconcile message
50650 Sep 22 23:22:27.757 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50651 Sep 22 23:22:27.757 INFO [1] client ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }
50652 Sep 22 23:22:27.757 INFO [2] received reconcile message
50653 Sep 22 23:22:27.757 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50654 Sep 22 23:22:27.757 INFO [2] client ExtentReopen { repair_id: ReconciliationId(695), extent_id: 101 }
50655 Sep 22 23:22:27.757 DEBG 695 Reopen extent 101
50656 Sep 22 23:22:27.758 DEBG 695 Reopen extent 101
50657 Sep 22 23:22:27.758 DEBG 695 Reopen extent 101
50658 Sep 22 23:22:27.759 DEBG [2] It's time to notify for 695
50659 Sep 22 23:22:27.759 INFO Completion from [2] id:695 status:true
50660 Sep 22 23:22:27.759 INFO [696/752] Repair commands completed
50661 Sep 22 23:22:27.759 INFO Pop front: ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50662 Sep 22 23:22:27.759 INFO Sent repair work, now wait for resp
50663 Sep 22 23:22:27.759 INFO [0] received reconcile message
50664 Sep 22 23:22:27.759 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50665 Sep 22 23:22:27.759 INFO [0] client ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50666 Sep 22 23:22:27.759 INFO [1] received reconcile message
50667 Sep 22 23:22:27.759 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50668 Sep 22 23:22:27.759 INFO [1] client ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50669 Sep 22 23:22:27.759 INFO [2] received reconcile message
50670 Sep 22 23:22:27.759 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50671 Sep 22 23:22:27.759 INFO [2] client ExtentFlush { repair_id: ReconciliationId(696), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50672 Sep 22 23:22:27.759 DEBG 696 Flush extent 173 with f:2 g:2
50673 Sep 22 23:22:27.759 DEBG Flush just extent 173 with f:2 and g:2
50674 Sep 22 23:22:27.759 DEBG [1] It's time to notify for 696
50675 Sep 22 23:22:27.759 INFO Completion from [1] id:696 status:true
50676 Sep 22 23:22:27.760 INFO [697/752] Repair commands completed
50677 Sep 22 23:22:27.760 INFO Pop front: ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }, state: ClientData([New, New, New]) }
50678 Sep 22 23:22:27.760 INFO Sent repair work, now wait for resp
50679 Sep 22 23:22:27.760 INFO [0] received reconcile message
50680 Sep 22 23:22:27.760 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }, state: ClientData([InProgress, New, New]) }, : downstairs
50681 Sep 22 23:22:27.760 INFO [0] client ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }
50682 Sep 22 23:22:27.760 INFO [1] received reconcile message
50683 Sep 22 23:22:27.760 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50684 Sep 22 23:22:27.760 INFO [1] client ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }
50685 Sep 22 23:22:27.760 INFO [2] received reconcile message
50686 Sep 22 23:22:27.760 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50687 Sep 22 23:22:27.760 INFO [2] client ExtentClose { repair_id: ReconciliationId(697), extent_id: 173 }
50688 Sep 22 23:22:27.760 DEBG 697 Close extent 173
50689 Sep 22 23:22:27.760 DEBG 697 Close extent 173
50690 Sep 22 23:22:27.760 DEBG 697 Close extent 173
50691 Sep 22 23:22:27.761 DEBG [2] It's time to notify for 697
50692 Sep 22 23:22:27.761 INFO Completion from [2] id:697 status:true
50693 Sep 22 23:22:27.761 INFO [698/752] Repair commands completed
50694 Sep 22 23:22:27.761 INFO Pop front: ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50695 Sep 22 23:22:27.761 INFO Sent repair work, now wait for resp
50696 Sep 22 23:22:27.761 INFO [0] received reconcile message
50697 Sep 22 23:22:27.761 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50698 Sep 22 23:22:27.761 INFO [0] client ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50699 Sep 22 23:22:27.761 INFO [0] Sending repair request ReconciliationId(698)
50700 Sep 22 23:22:27.761 INFO [1] received reconcile message
50701 Sep 22 23:22:27.761 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50702 Sep 22 23:22:27.761 INFO [1] client ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50703 Sep 22 23:22:27.761 INFO [1] No action required ReconciliationId(698)
50704 Sep 22 23:22:27.761 INFO [2] received reconcile message
50705 Sep 22 23:22:27.761 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50706 Sep 22 23:22:27.761 INFO [2] client ExtentRepair { repair_id: ReconciliationId(698), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50707 Sep 22 23:22:27.761 INFO [2] No action required ReconciliationId(698)
50708 Sep 22 23:22:27.761 DEBG 698 Repair extent 173 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50709 Sep 22 23:22:27.761 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0AD.copy"
50710 Sep 22 23:22:27.817 DEBG [0] Read AckReady 1093, : downstairs
50711 Sep 22 23:22:27.827 INFO accepted connection, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50712 Sep 22 23:22:27.827 TRCE incoming request, uri: /extent/173/files, method: GET, req_id: 841e9673-d182-483b-abf8-71e0731fd45a, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50713 Sep 22 23:22:27.827 INFO request completed, latency_us: 280, response_code: 200, uri: /extent/173/files, method: GET, req_id: 841e9673-d182-483b-abf8-71e0731fd45a, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50714 Sep 22 23:22:27.827 INFO eid:173 Found repair files: ["0AD", "0AD.db"]
50715 Sep 22 23:22:27.828 TRCE incoming request, uri: /newextent/173/data, method: GET, req_id: 3a4a6b3c-7faf-4a2c-80d4-10450327c20f, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50716 Sep 22 23:22:27.828 INFO request completed, latency_us: 382, response_code: 200, uri: /newextent/173/data, method: GET, req_id: 3a4a6b3c-7faf-4a2c-80d4-10450327c20f, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50717 Sep 22 23:22:27.833 TRCE incoming request, uri: /newextent/173/db, method: GET, req_id: 0fc68043-49c8-4d25-90ac-61bbc17952e4, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50718 Sep 22 23:22:27.833 INFO request completed, latency_us: 309, response_code: 200, uri: /newextent/173/db, method: GET, req_id: 0fc68043-49c8-4d25-90ac-61bbc17952e4, remote_addr: 127.0.0.1:64338, local_addr: 127.0.0.1:52864, task: repair
50719 Sep 22 23:22:27.834 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0AD.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0AD.replace"
50720 Sep 22 23:22:27.834 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50721 Sep 22 23:22:27.836 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0AD.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50722 Sep 22 23:22:27.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AD"
50723 Sep 22 23:22:27.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AD.db"
50724 Sep 22 23:22:27.836 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50725 Sep 22 23:22:27.836 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0AD.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0AD.completed"
50726 Sep 22 23:22:27.837 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50727 Sep 22 23:22:27.837 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50728 Sep 22 23:22:27.837 DEBG [0] It's time to notify for 698
50729 Sep 22 23:22:27.837 INFO Completion from [0] id:698 status:true
50730 Sep 22 23:22:27.837 INFO [699/752] Repair commands completed
50731 Sep 22 23:22:27.837 INFO Pop front: ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }, state: ClientData([New, New, New]) }
50732 Sep 22 23:22:27.837 INFO Sent repair work, now wait for resp
50733 Sep 22 23:22:27.837 INFO [0] received reconcile message
50734 Sep 22 23:22:27.837 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }, state: ClientData([InProgress, New, New]) }, : downstairs
50735 Sep 22 23:22:27.837 INFO [0] client ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }
50736 Sep 22 23:22:27.837 INFO [1] received reconcile message
50737 Sep 22 23:22:27.837 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50738 Sep 22 23:22:27.837 INFO [1] client ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }
50739 Sep 22 23:22:27.837 INFO [2] received reconcile message
50740 Sep 22 23:22:27.837 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50741 Sep 22 23:22:27.837 INFO [2] client ExtentReopen { repair_id: ReconciliationId(699), extent_id: 173 }
50742 Sep 22 23:22:27.838 DEBG 699 Reopen extent 173
50743 Sep 22 23:22:27.838 DEBG 699 Reopen extent 173
50744 Sep 22 23:22:27.839 DEBG 699 Reopen extent 173
50745 Sep 22 23:22:27.839 DEBG [2] It's time to notify for 699
50746 Sep 22 23:22:27.840 INFO Completion from [2] id:699 status:true
50747 Sep 22 23:22:27.840 INFO [700/752] Repair commands completed
50748 Sep 22 23:22:27.840 INFO Pop front: ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50749 Sep 22 23:22:27.840 INFO Sent repair work, now wait for resp
50750 Sep 22 23:22:27.840 INFO [0] received reconcile message
50751 Sep 22 23:22:27.840 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50752 Sep 22 23:22:27.840 INFO [0] client ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50753 Sep 22 23:22:27.840 INFO [1] received reconcile message
50754 Sep 22 23:22:27.840 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50755 Sep 22 23:22:27.840 INFO [1] client ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50756 Sep 22 23:22:27.840 INFO [2] received reconcile message
50757 Sep 22 23:22:27.840 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50758 Sep 22 23:22:27.840 INFO [2] client ExtentFlush { repair_id: ReconciliationId(700), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50759 Sep 22 23:22:27.840 DEBG 700 Flush extent 20 with f:2 g:2
50760 Sep 22 23:22:27.840 DEBG Flush just extent 20 with f:2 and g:2
50761 Sep 22 23:22:27.840 DEBG [1] It's time to notify for 700
50762 Sep 22 23:22:27.840 INFO Completion from [1] id:700 status:true
50763 Sep 22 23:22:27.840 INFO [701/752] Repair commands completed
50764 Sep 22 23:22:27.840 INFO Pop front: ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }, state: ClientData([New, New, New]) }
50765 Sep 22 23:22:27.840 INFO Sent repair work, now wait for resp
50766 Sep 22 23:22:27.840 INFO [0] received reconcile message
50767 Sep 22 23:22:27.840 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }, state: ClientData([InProgress, New, New]) }, : downstairs
50768 Sep 22 23:22:27.840 INFO [0] client ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }
50769 Sep 22 23:22:27.840 INFO [1] received reconcile message
50770 Sep 22 23:22:27.840 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50771 Sep 22 23:22:27.840 INFO [1] client ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }
50772 Sep 22 23:22:27.840 INFO [2] received reconcile message
50773 Sep 22 23:22:27.840 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50774 Sep 22 23:22:27.840 INFO [2] client ExtentClose { repair_id: ReconciliationId(701), extent_id: 20 }
50775 Sep 22 23:22:27.841 DEBG 701 Close extent 20
50776 Sep 22 23:22:27.841 DEBG 701 Close extent 20
50777 Sep 22 23:22:27.841 DEBG 701 Close extent 20
50778 Sep 22 23:22:27.842 DEBG [2] It's time to notify for 701
50779 Sep 22 23:22:27.842 INFO Completion from [2] id:701 status:true
50780 Sep 22 23:22:27.842 INFO [702/752] Repair commands completed
50781 Sep 22 23:22:27.842 INFO Pop front: ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50782 Sep 22 23:22:27.842 INFO Sent repair work, now wait for resp
50783 Sep 22 23:22:27.842 INFO [0] received reconcile message
50784 Sep 22 23:22:27.842 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50785 Sep 22 23:22:27.842 INFO [0] client ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50786 Sep 22 23:22:27.842 INFO [0] Sending repair request ReconciliationId(702)
50787 Sep 22 23:22:27.842 INFO [1] received reconcile message
50788 Sep 22 23:22:27.842 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50789 Sep 22 23:22:27.842 INFO [1] client ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50790 Sep 22 23:22:27.842 INFO [1] No action required ReconciliationId(702)
50791 Sep 22 23:22:27.842 INFO [2] received reconcile message
50792 Sep 22 23:22:27.842 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50793 Sep 22 23:22:27.842 INFO [2] client ExtentRepair { repair_id: ReconciliationId(702), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50794 Sep 22 23:22:27.842 INFO [2] No action required ReconciliationId(702)
50795 Sep 22 23:22:27.842 DEBG 702 Repair extent 20 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50796 Sep 22 23:22:27.842 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/014.copy"
50797 Sep 22 23:22:27.909 INFO accepted connection, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50798 Sep 22 23:22:27.909 TRCE incoming request, uri: /extent/20/files, method: GET, req_id: 6038f6e2-787e-46e6-86c4-596823d9314e, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50799 Sep 22 23:22:27.910 INFO request completed, latency_us: 283, response_code: 200, uri: /extent/20/files, method: GET, req_id: 6038f6e2-787e-46e6-86c4-596823d9314e, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50800 Sep 22 23:22:27.910 INFO eid:20 Found repair files: ["014", "014.db"]
50801 Sep 22 23:22:27.910 TRCE incoming request, uri: /newextent/20/data, method: GET, req_id: fada309d-928c-48db-908a-2f6b2c0a2044, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50802 Sep 22 23:22:27.911 INFO request completed, latency_us: 378, response_code: 200, uri: /newextent/20/data, method: GET, req_id: fada309d-928c-48db-908a-2f6b2c0a2044, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50803 Sep 22 23:22:27.916 TRCE incoming request, uri: /newextent/20/db, method: GET, req_id: c0205d8e-3a61-47ff-8b8c-7b12b2332b1d, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50804 Sep 22 23:22:27.916 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/20/db, method: GET, req_id: c0205d8e-3a61-47ff-8b8c-7b12b2332b1d, remote_addr: 127.0.0.1:37947, local_addr: 127.0.0.1:52864, task: repair
50805 Sep 22 23:22:27.917 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/014.copy" to "/tmp/downstairs-zrMnlo6G/00/000/014.replace"
50806 Sep 22 23:22:27.917 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50807 Sep 22 23:22:27.918 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/014.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50808 Sep 22 23:22:27.919 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/014"
50809 Sep 22 23:22:27.919 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/014.db"
50810 Sep 22 23:22:27.919 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50811 Sep 22 23:22:27.919 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/014.replace" to "/tmp/downstairs-zrMnlo6G/00/000/014.completed"
50812 Sep 22 23:22:27.919 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50813 Sep 22 23:22:27.919 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50814 Sep 22 23:22:27.919 DEBG [0] It's time to notify for 702
50815 Sep 22 23:22:27.919 INFO Completion from [0] id:702 status:true
50816 Sep 22 23:22:27.919 INFO [703/752] Repair commands completed
50817 Sep 22 23:22:27.919 INFO Pop front: ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }, state: ClientData([New, New, New]) }
50818 Sep 22 23:22:27.919 INFO Sent repair work, now wait for resp
50819 Sep 22 23:22:27.919 INFO [0] received reconcile message
50820 Sep 22 23:22:27.919 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }, state: ClientData([InProgress, New, New]) }, : downstairs
50821 Sep 22 23:22:27.919 INFO [0] client ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }
50822 Sep 22 23:22:27.920 INFO [1] received reconcile message
50823 Sep 22 23:22:27.920 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50824 Sep 22 23:22:27.920 INFO [1] client ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }
50825 Sep 22 23:22:27.920 INFO [2] received reconcile message
50826 Sep 22 23:22:27.920 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50827 Sep 22 23:22:27.920 INFO [2] client ExtentReopen { repair_id: ReconciliationId(703), extent_id: 20 }
50828 Sep 22 23:22:27.920 DEBG 703 Reopen extent 20
50829 Sep 22 23:22:27.921 DEBG 703 Reopen extent 20
50830 Sep 22 23:22:27.921 DEBG 703 Reopen extent 20
50831 Sep 22 23:22:27.922 DEBG [2] It's time to notify for 703
50832 Sep 22 23:22:27.922 INFO Completion from [2] id:703 status:true
50833 Sep 22 23:22:27.922 INFO [704/752] Repair commands completed
50834 Sep 22 23:22:27.922 INFO Pop front: ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50835 Sep 22 23:22:27.922 INFO Sent repair work, now wait for resp
50836 Sep 22 23:22:27.922 INFO [0] received reconcile message
50837 Sep 22 23:22:27.922 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50838 Sep 22 23:22:27.922 INFO [0] client ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50839 Sep 22 23:22:27.922 INFO [1] received reconcile message
50840 Sep 22 23:22:27.922 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50841 Sep 22 23:22:27.922 INFO [1] client ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50842 Sep 22 23:22:27.922 INFO [2] received reconcile message
50843 Sep 22 23:22:27.922 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50844 Sep 22 23:22:27.922 INFO [2] client ExtentFlush { repair_id: ReconciliationId(704), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50845 Sep 22 23:22:27.922 DEBG 704 Flush extent 104 with f:2 g:2
50846 Sep 22 23:22:27.922 DEBG Flush just extent 104 with f:2 and g:2
50847 Sep 22 23:22:27.922 DEBG [1] It's time to notify for 704
50848 Sep 22 23:22:27.922 INFO Completion from [1] id:704 status:true
50849 Sep 22 23:22:27.922 INFO [705/752] Repair commands completed
50850 Sep 22 23:22:27.922 INFO Pop front: ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }, state: ClientData([New, New, New]) }
50851 Sep 22 23:22:27.923 INFO Sent repair work, now wait for resp
50852 Sep 22 23:22:27.923 INFO [0] received reconcile message
50853 Sep 22 23:22:27.923 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }, state: ClientData([InProgress, New, New]) }, : downstairs
50854 Sep 22 23:22:27.923 INFO [0] client ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }
50855 Sep 22 23:22:27.923 INFO [1] received reconcile message
50856 Sep 22 23:22:27.923 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50857 Sep 22 23:22:27.923 INFO [1] client ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }
50858 Sep 22 23:22:27.923 INFO [2] received reconcile message
50859 Sep 22 23:22:27.923 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50860 Sep 22 23:22:27.923 INFO [2] client ExtentClose { repair_id: ReconciliationId(705), extent_id: 104 }
50861 Sep 22 23:22:27.923 DEBG 705 Close extent 104
50862 Sep 22 23:22:27.923 DEBG 705 Close extent 104
50863 Sep 22 23:22:27.923 DEBG 705 Close extent 104
50864 Sep 22 23:22:27.924 DEBG [2] It's time to notify for 705
50865 Sep 22 23:22:27.924 INFO Completion from [2] id:705 status:true
50866 Sep 22 23:22:27.924 INFO [706/752] Repair commands completed
50867 Sep 22 23:22:27.924 INFO Pop front: ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50868 Sep 22 23:22:27.924 INFO Sent repair work, now wait for resp
50869 Sep 22 23:22:27.924 INFO [0] received reconcile message
50870 Sep 22 23:22:27.924 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50871 Sep 22 23:22:27.924 INFO [0] client ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50872 Sep 22 23:22:27.924 INFO [0] Sending repair request ReconciliationId(706)
50873 Sep 22 23:22:27.924 INFO [1] received reconcile message
50874 Sep 22 23:22:27.924 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50875 Sep 22 23:22:27.924 INFO [1] client ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50876 Sep 22 23:22:27.924 INFO [1] No action required ReconciliationId(706)
50877 Sep 22 23:22:27.924 INFO [2] received reconcile message
50878 Sep 22 23:22:27.924 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50879 Sep 22 23:22:27.924 INFO [2] client ExtentRepair { repair_id: ReconciliationId(706), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50880 Sep 22 23:22:27.924 INFO [2] No action required ReconciliationId(706)
50881 Sep 22 23:22:27.924 DEBG 706 Repair extent 104 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50882 Sep 22 23:22:27.924 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/068.copy"
50883 Sep 22 23:22:27.989 INFO accepted connection, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50884 Sep 22 23:22:27.989 TRCE incoming request, uri: /extent/104/files, method: GET, req_id: 9382c064-ece8-4d33-9ac3-1504eeead0b0, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50885 Sep 22 23:22:27.990 INFO request completed, latency_us: 222, response_code: 200, uri: /extent/104/files, method: GET, req_id: 9382c064-ece8-4d33-9ac3-1504eeead0b0, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50886 Sep 22 23:22:27.990 INFO eid:104 Found repair files: ["068", "068.db"]
50887 Sep 22 23:22:27.990 TRCE incoming request, uri: /newextent/104/data, method: GET, req_id: cd7f2879-b231-47a1-9626-585d0c29c482, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50888 Sep 22 23:22:27.991 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/104/data, method: GET, req_id: cd7f2879-b231-47a1-9626-585d0c29c482, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50889 Sep 22 23:22:27.996 TRCE incoming request, uri: /newextent/104/db, method: GET, req_id: 6008e912-1579-4c14-b4de-a6510e50cdc0, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50890 Sep 22 23:22:27.996 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/104/db, method: GET, req_id: 6008e912-1579-4c14-b4de-a6510e50cdc0, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
50891 Sep 22 23:22:27.997 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/068.copy" to "/tmp/downstairs-zrMnlo6G/00/000/068.replace"
50892 Sep 22 23:22:27.997 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50893 Sep 22 23:22:27.998 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/068.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50894 Sep 22 23:22:27.998 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/068"
50895 Sep 22 23:22:27.998 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/068.db"
50896 Sep 22 23:22:27.998 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50897 Sep 22 23:22:27.998 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/068.replace" to "/tmp/downstairs-zrMnlo6G/00/000/068.completed"
50898 Sep 22 23:22:27.998 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50899 Sep 22 23:22:27.998 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50900 Sep 22 23:22:27.999 DEBG [0] It's time to notify for 706
50901 Sep 22 23:22:27.999 INFO Completion from [0] id:706 status:true
50902 Sep 22 23:22:27.999 INFO [707/752] Repair commands completed
50903 Sep 22 23:22:27.999 INFO Pop front: ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }, state: ClientData([New, New, New]) }
50904 Sep 22 23:22:27.999 INFO Sent repair work, now wait for resp
50905 Sep 22 23:22:27.999 INFO [0] received reconcile message
50906 Sep 22 23:22:27.999 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }, state: ClientData([InProgress, New, New]) }, : downstairs
50907 Sep 22 23:22:27.999 INFO [0] client ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }
50908 Sep 22 23:22:27.999 INFO [1] received reconcile message
50909 Sep 22 23:22:27.999 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50910 Sep 22 23:22:27.999 INFO [1] client ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }
50911 Sep 22 23:22:27.999 INFO [2] received reconcile message
50912 Sep 22 23:22:27.999 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50913 Sep 22 23:22:27.999 INFO [2] client ExtentReopen { repair_id: ReconciliationId(707), extent_id: 104 }
50914 Sep 22 23:22:27.999 DEBG 707 Reopen extent 104
50915 Sep 22 23:22:28.000 DEBG 707 Reopen extent 104
50916 Sep 22 23:22:28.000 DEBG 707 Reopen extent 104
50917 Sep 22 23:22:28.001 DEBG [2] It's time to notify for 707
50918 Sep 22 23:22:28.001 INFO Completion from [2] id:707 status:true
50919 Sep 22 23:22:28.001 INFO [708/752] Repair commands completed
50920 Sep 22 23:22:28.001 INFO Pop front: ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50921 Sep 22 23:22:28.001 INFO Sent repair work, now wait for resp
50922 Sep 22 23:22:28.001 INFO [0] received reconcile message
50923 Sep 22 23:22:28.001 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50924 Sep 22 23:22:28.001 INFO [0] client ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50925 Sep 22 23:22:28.001 INFO [1] received reconcile message
50926 Sep 22 23:22:28.001 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50927 Sep 22 23:22:28.001 INFO [1] client ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50928 Sep 22 23:22:28.001 INFO [2] received reconcile message
50929 Sep 22 23:22:28.001 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50930 Sep 22 23:22:28.001 INFO [2] client ExtentFlush { repair_id: ReconciliationId(708), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50931 Sep 22 23:22:28.002 DEBG 708 Flush extent 81 with f:2 g:2
50932 Sep 22 23:22:28.002 DEBG Flush just extent 81 with f:2 and g:2
50933 Sep 22 23:22:28.002 DEBG [1] It's time to notify for 708
50934 Sep 22 23:22:28.002 INFO Completion from [1] id:708 status:true
50935 Sep 22 23:22:28.002 INFO [709/752] Repair commands completed
50936 Sep 22 23:22:28.002 INFO Pop front: ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }, state: ClientData([New, New, New]) }
50937 Sep 22 23:22:28.002 INFO Sent repair work, now wait for resp
50938 Sep 22 23:22:28.002 INFO [0] received reconcile message
50939 Sep 22 23:22:28.002 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }, state: ClientData([InProgress, New, New]) }, : downstairs
50940 Sep 22 23:22:28.002 INFO [0] client ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }
50941 Sep 22 23:22:28.002 INFO [1] received reconcile message
50942 Sep 22 23:22:28.002 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50943 Sep 22 23:22:28.002 INFO [1] client ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }
50944 Sep 22 23:22:28.002 INFO [2] received reconcile message
50945 Sep 22 23:22:28.002 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50946 Sep 22 23:22:28.002 INFO [2] client ExtentClose { repair_id: ReconciliationId(709), extent_id: 81 }
50947 Sep 22 23:22:28.002 DEBG 709 Close extent 81
50948 Sep 22 23:22:28.002 DEBG 709 Close extent 81
50949 Sep 22 23:22:28.003 DEBG 709 Close extent 81
50950 Sep 22 23:22:28.003 DEBG [2] It's time to notify for 709
50951 Sep 22 23:22:28.003 INFO Completion from [2] id:709 status:true
50952 Sep 22 23:22:28.003 INFO [710/752] Repair commands completed
50953 Sep 22 23:22:28.003 INFO Pop front: ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50954 Sep 22 23:22:28.003 INFO Sent repair work, now wait for resp
50955 Sep 22 23:22:28.003 INFO [0] received reconcile message
50956 Sep 22 23:22:28.003 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50957 Sep 22 23:22:28.003 INFO [0] client ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50958 Sep 22 23:22:28.003 INFO [0] Sending repair request ReconciliationId(710)
50959 Sep 22 23:22:28.003 INFO [1] received reconcile message
50960 Sep 22 23:22:28.003 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50961 Sep 22 23:22:28.003 INFO [1] client ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50962 Sep 22 23:22:28.003 INFO [1] No action required ReconciliationId(710)
50963 Sep 22 23:22:28.003 INFO [2] received reconcile message
50964 Sep 22 23:22:28.004 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50965 Sep 22 23:22:28.004 INFO [2] client ExtentRepair { repair_id: ReconciliationId(710), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
50966 Sep 22 23:22:28.004 INFO [2] No action required ReconciliationId(710)
50967 Sep 22 23:22:28.004 DEBG 710 Repair extent 81 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
50968 Sep 22 23:22:28.004 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/051.copy"
50969 Sep 22 23:22:28.069 INFO accepted connection, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50970 Sep 22 23:22:28.070 TRCE incoming request, uri: /extent/81/files, method: GET, req_id: c8331010-c2da-45ca-b56b-984b4c25f239, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50971 Sep 22 23:22:28.070 INFO request completed, latency_us: 246, response_code: 200, uri: /extent/81/files, method: GET, req_id: c8331010-c2da-45ca-b56b-984b4c25f239, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50972 Sep 22 23:22:28.070 INFO eid:81 Found repair files: ["051", "051.db"]
50973 Sep 22 23:22:28.070 TRCE incoming request, uri: /newextent/81/data, method: GET, req_id: 28992663-e63f-4cee-b965-005b9d7740f3, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50974 Sep 22 23:22:28.071 INFO request completed, latency_us: 358, response_code: 200, uri: /newextent/81/data, method: GET, req_id: 28992663-e63f-4cee-b965-005b9d7740f3, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50975 Sep 22 23:22:28.076 TRCE incoming request, uri: /newextent/81/db, method: GET, req_id: da08371f-3635-4080-b01a-4fbd9df28301, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50976 Sep 22 23:22:28.076 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/81/db, method: GET, req_id: da08371f-3635-4080-b01a-4fbd9df28301, remote_addr: 127.0.0.1:48474, local_addr: 127.0.0.1:52864, task: repair
50977 Sep 22 23:22:28.077 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/051.copy" to "/tmp/downstairs-zrMnlo6G/00/000/051.replace"
50978 Sep 22 23:22:28.077 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50979 Sep 22 23:22:28.078 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/051.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
50980 Sep 22 23:22:28.078 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/051"
50981 Sep 22 23:22:28.078 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/051.db"
50982 Sep 22 23:22:28.078 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50983 Sep 22 23:22:28.078 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/051.replace" to "/tmp/downstairs-zrMnlo6G/00/000/051.completed"
50984 Sep 22 23:22:28.078 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50985 Sep 22 23:22:28.078 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
50986 Sep 22 23:22:28.079 DEBG [0] It's time to notify for 710
50987 Sep 22 23:22:28.079 INFO Completion from [0] id:710 status:true
50988 Sep 22 23:22:28.079 INFO [711/752] Repair commands completed
50989 Sep 22 23:22:28.079 INFO Pop front: ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }, state: ClientData([New, New, New]) }
50990 Sep 22 23:22:28.079 INFO Sent repair work, now wait for resp
50991 Sep 22 23:22:28.079 INFO [0] received reconcile message
50992 Sep 22 23:22:28.079 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }, state: ClientData([InProgress, New, New]) }, : downstairs
50993 Sep 22 23:22:28.079 INFO [0] client ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }
50994 Sep 22 23:22:28.079 INFO [1] received reconcile message
50995 Sep 22 23:22:28.079 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50996 Sep 22 23:22:28.079 INFO [1] client ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }
50997 Sep 22 23:22:28.079 INFO [2] received reconcile message
50998 Sep 22 23:22:28.079 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50999 Sep 22 23:22:28.079 INFO [2] client ExtentReopen { repair_id: ReconciliationId(711), extent_id: 81 }
51000 Sep 22 23:22:28.079 DEBG 711 Reopen extent 81
51001 Sep 22 23:22:28.080 DEBG 711 Reopen extent 81
51002 Sep 22 23:22:28.080 DEBG 711 Reopen extent 81
51003 Sep 22 23:22:28.081 DEBG [2] It's time to notify for 711
51004 Sep 22 23:22:28.081 INFO Completion from [2] id:711 status:true
51005 Sep 22 23:22:28.081 INFO [712/752] Repair commands completed
51006 Sep 22 23:22:28.081 INFO Pop front: ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51007 Sep 22 23:22:28.081 INFO Sent repair work, now wait for resp
51008 Sep 22 23:22:28.081 INFO [0] received reconcile message
51009 Sep 22 23:22:28.081 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51010 Sep 22 23:22:28.081 INFO [0] client ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51011 Sep 22 23:22:28.081 INFO [1] received reconcile message
51012 Sep 22 23:22:28.081 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51013 Sep 22 23:22:28.081 INFO [1] client ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51014 Sep 22 23:22:28.081 INFO [2] received reconcile message
51015 Sep 22 23:22:28.081 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51016 Sep 22 23:22:28.081 INFO [2] client ExtentFlush { repair_id: ReconciliationId(712), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51017 Sep 22 23:22:28.082 DEBG 712 Flush extent 120 with f:2 g:2
51018 Sep 22 23:22:28.082 DEBG Flush just extent 120 with f:2 and g:2
51019 Sep 22 23:22:28.082 DEBG [1] It's time to notify for 712
51020 Sep 22 23:22:28.082 INFO Completion from [1] id:712 status:true
51021 Sep 22 23:22:28.082 INFO [713/752] Repair commands completed
51022 Sep 22 23:22:28.082 INFO Pop front: ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }, state: ClientData([New, New, New]) }
51023 Sep 22 23:22:28.082 INFO Sent repair work, now wait for resp
51024 Sep 22 23:22:28.082 INFO [0] received reconcile message
51025 Sep 22 23:22:28.082 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }, state: ClientData([InProgress, New, New]) }, : downstairs
51026 Sep 22 23:22:28.082 INFO [0] client ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }
51027 Sep 22 23:22:28.082 INFO [1] received reconcile message
51028 Sep 22 23:22:28.082 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51029 Sep 22 23:22:28.082 INFO [1] client ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }
51030 Sep 22 23:22:28.082 INFO [2] received reconcile message
51031 Sep 22 23:22:28.082 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51032 Sep 22 23:22:28.082 INFO [2] client ExtentClose { repair_id: ReconciliationId(713), extent_id: 120 }
51033 Sep 22 23:22:28.082 DEBG 713 Close extent 120
51034 Sep 22 23:22:28.082 DEBG 713 Close extent 120
51035 Sep 22 23:22:28.083 DEBG 713 Close extent 120
51036 Sep 22 23:22:28.083 DEBG [2] It's time to notify for 713
51037 Sep 22 23:22:28.083 INFO Completion from [2] id:713 status:true
51038 Sep 22 23:22:28.083 INFO [714/752] Repair commands completed
51039 Sep 22 23:22:28.083 INFO Pop front: ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51040 Sep 22 23:22:28.083 INFO Sent repair work, now wait for resp
51041 Sep 22 23:22:28.083 INFO [0] received reconcile message
51042 Sep 22 23:22:28.083 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51043 Sep 22 23:22:28.083 INFO [0] client ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51044 Sep 22 23:22:28.083 INFO [0] Sending repair request ReconciliationId(714)
51045 Sep 22 23:22:28.083 INFO [1] received reconcile message
51046 Sep 22 23:22:28.083 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51047 Sep 22 23:22:28.084 INFO [1] client ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51048 Sep 22 23:22:28.084 INFO [1] No action required ReconciliationId(714)
51049 Sep 22 23:22:28.084 INFO [2] received reconcile message
51050 Sep 22 23:22:28.084 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51051 Sep 22 23:22:28.084 INFO [2] client ExtentRepair { repair_id: ReconciliationId(714), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51052 Sep 22 23:22:28.084 INFO [2] No action required ReconciliationId(714)
51053 Sep 22 23:22:28.084 DEBG 714 Repair extent 120 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51054 Sep 22 23:22:28.084 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/078.copy"
51055 Sep 22 23:22:28.147 INFO accepted connection, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51056 Sep 22 23:22:28.147 TRCE incoming request, uri: /extent/120/files, method: GET, req_id: 4b80a61b-e7d9-4ed1-ad54-4e3780892680, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51057 Sep 22 23:22:28.147 INFO request completed, latency_us: 232, response_code: 200, uri: /extent/120/files, method: GET, req_id: 4b80a61b-e7d9-4ed1-ad54-4e3780892680, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51058 Sep 22 23:22:28.148 INFO eid:120 Found repair files: ["078", "078.db"]
51059 Sep 22 23:22:28.148 TRCE incoming request, uri: /newextent/120/data, method: GET, req_id: e988c35e-260e-40a1-ae9f-bc0eeb5735eb, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51060 Sep 22 23:22:28.148 INFO request completed, latency_us: 352, response_code: 200, uri: /newextent/120/data, method: GET, req_id: e988c35e-260e-40a1-ae9f-bc0eeb5735eb, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51061 Sep 22 23:22:28.151 DEBG up_ds_listen was notified
51062 Sep 22 23:22:28.151 DEBG up_ds_listen process 1092
51063 Sep 22 23:22:28.151 DEBG [A] ack job 1092:93, : downstairs
51064 Sep 22 23:22:28.151 DEBG up_ds_listen process 1093
51065 Sep 22 23:22:28.151 DEBG [A] ack job 1093:94, : downstairs
51066 Sep 22 23:22:28.153 TRCE incoming request, uri: /newextent/120/db, method: GET, req_id: 18777097-13d1-4ecd-a9eb-cad5db9563f6, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51067 Sep 22 23:22:28.154 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/120/db, method: GET, req_id: 18777097-13d1-4ecd-a9eb-cad5db9563f6, remote_addr: 127.0.0.1:59646, local_addr: 127.0.0.1:52864, task: repair
51068 Sep 22 23:22:28.155 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/078.copy" to "/tmp/downstairs-zrMnlo6G/00/000/078.replace"
51069 Sep 22 23:22:28.155 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51070 Sep 22 23:22:28.156 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/078.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51071 Sep 22 23:22:28.156 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/078"
51072 Sep 22 23:22:28.156 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/078.db"
51073 Sep 22 23:22:28.156 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51074 Sep 22 23:22:28.156 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/078.replace" to "/tmp/downstairs-zrMnlo6G/00/000/078.completed"
51075 Sep 22 23:22:28.156 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51076 Sep 22 23:22:28.156 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51077 Sep 22 23:22:28.157 DEBG [0] It's time to notify for 714
51078 Sep 22 23:22:28.157 INFO Completion from [0] id:714 status:true
51079 Sep 22 23:22:28.157 INFO [715/752] Repair commands completed
51080 Sep 22 23:22:28.157 INFO Pop front: ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }, state: ClientData([New, New, New]) }
51081 Sep 22 23:22:28.157 INFO Sent repair work, now wait for resp
51082 Sep 22 23:22:28.157 INFO [0] received reconcile message
51083 Sep 22 23:22:28.157 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }, state: ClientData([InProgress, New, New]) }, : downstairs
51084 Sep 22 23:22:28.157 INFO [0] client ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }
51085 Sep 22 23:22:28.157 INFO [1] received reconcile message
51086 Sep 22 23:22:28.157 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51087 Sep 22 23:22:28.157 INFO [1] client ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }
51088 Sep 22 23:22:28.157 INFO [2] received reconcile message
51089 Sep 22 23:22:28.157 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51090 Sep 22 23:22:28.157 INFO [2] client ExtentReopen { repair_id: ReconciliationId(715), extent_id: 120 }
51091 Sep 22 23:22:28.157 DEBG 715 Reopen extent 120
51092 Sep 22 23:22:28.158 DEBG 715 Reopen extent 120
51093 Sep 22 23:22:28.158 DEBG 715 Reopen extent 120
51094 Sep 22 23:22:28.159 DEBG [2] It's time to notify for 715
51095 Sep 22 23:22:28.159 INFO Completion from [2] id:715 status:true
51096 Sep 22 23:22:28.159 INFO [716/752] Repair commands completed
51097 Sep 22 23:22:28.159 INFO Pop front: ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51098 Sep 22 23:22:28.159 INFO Sent repair work, now wait for resp
51099 Sep 22 23:22:28.159 INFO [0] received reconcile message
51100 Sep 22 23:22:28.159 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51101 Sep 22 23:22:28.159 INFO [0] client ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51102 Sep 22 23:22:28.159 INFO [1] received reconcile message
51103 Sep 22 23:22:28.159 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51104 Sep 22 23:22:28.159 INFO [1] client ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51105 Sep 22 23:22:28.159 INFO [2] received reconcile message
51106 Sep 22 23:22:28.159 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51107 Sep 22 23:22:28.159 INFO [2] client ExtentFlush { repair_id: ReconciliationId(716), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51108 Sep 22 23:22:28.160 DEBG 716 Flush extent 145 with f:2 g:2
51109 Sep 22 23:22:28.160 DEBG Flush just extent 145 with f:2 and g:2
51110 Sep 22 23:22:28.160 DEBG [1] It's time to notify for 716
51111 Sep 22 23:22:28.160 INFO Completion from [1] id:716 status:true
51112 Sep 22 23:22:28.160 INFO [717/752] Repair commands completed
51113 Sep 22 23:22:28.160 INFO Pop front: ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }, state: ClientData([New, New, New]) }
51114 Sep 22 23:22:28.160 INFO Sent repair work, now wait for resp
51115 Sep 22 23:22:28.160 INFO [0] received reconcile message
51116 Sep 22 23:22:28.160 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }, state: ClientData([InProgress, New, New]) }, : downstairs
51117 Sep 22 23:22:28.160 INFO [0] client ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }
51118 Sep 22 23:22:28.160 INFO [1] received reconcile message
51119 Sep 22 23:22:28.160 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51120 Sep 22 23:22:28.160 INFO [1] client ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }
51121 Sep 22 23:22:28.160 INFO [2] received reconcile message
51122 Sep 22 23:22:28.160 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51123 Sep 22 23:22:28.160 INFO [2] client ExtentClose { repair_id: ReconciliationId(717), extent_id: 145 }
51124 Sep 22 23:22:28.160 DEBG 717 Close extent 145
51125 Sep 22 23:22:28.160 DEBG 717 Close extent 145
51126 Sep 22 23:22:28.161 DEBG 717 Close extent 145
51127 Sep 22 23:22:28.161 DEBG [2] It's time to notify for 717
51128 Sep 22 23:22:28.161 INFO Completion from [2] id:717 status:true
51129 Sep 22 23:22:28.161 INFO [718/752] Repair commands completed
51130 Sep 22 23:22:28.161 INFO Pop front: ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51131 Sep 22 23:22:28.161 INFO Sent repair work, now wait for resp
51132 Sep 22 23:22:28.161 INFO [0] received reconcile message
51133 Sep 22 23:22:28.161 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51134 Sep 22 23:22:28.161 INFO [0] client ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51135 Sep 22 23:22:28.161 INFO [0] Sending repair request ReconciliationId(718)
51136 Sep 22 23:22:28.161 INFO [1] received reconcile message
51137 Sep 22 23:22:28.161 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51138 Sep 22 23:22:28.161 INFO [1] client ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51139 Sep 22 23:22:28.161 INFO [1] No action required ReconciliationId(718)
51140 Sep 22 23:22:28.162 INFO [2] received reconcile message
51141 Sep 22 23:22:28.162 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51142 Sep 22 23:22:28.162 INFO [2] client ExtentRepair { repair_id: ReconciliationId(718), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51143 Sep 22 23:22:28.162 INFO [2] No action required ReconciliationId(718)
51144 Sep 22 23:22:28.162 DEBG 718 Repair extent 145 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51145 Sep 22 23:22:28.162 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/091.copy"
51146 Sep 22 23:22:28.204 DEBG up_ds_listen checked 2 jobs, back to waiting
51147 Sep 22 23:22:28.204 DEBG up_ds_listen was notified
51148 Sep 22 23:22:28.204 DEBG up_ds_listen checked 0 jobs, back to waiting
51149 Sep 22 23:22:28.205 DEBG Flush :1092 extent_limit None deps:[JobId(1091), JobId(1090)] res:true f:35 g:1
51150 Sep 22 23:22:28.211 DEBG Read :1093 deps:[JobId(1092)] res:true
51151 Sep 22 23:22:28.227 INFO accepted connection, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51152 Sep 22 23:22:28.228 TRCE incoming request, uri: /extent/145/files, method: GET, req_id: 365847b7-a027-48dd-a7f1-8f480e1cc27a, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51153 Sep 22 23:22:28.228 INFO request completed, latency_us: 239, response_code: 200, uri: /extent/145/files, method: GET, req_id: 365847b7-a027-48dd-a7f1-8f480e1cc27a, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51154 Sep 22 23:22:28.228 INFO eid:145 Found repair files: ["091", "091.db"]
51155 Sep 22 23:22:28.228 TRCE incoming request, uri: /newextent/145/data, method: GET, req_id: 370067db-4190-4d4b-89c5-f2f9970c46cf, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51156 Sep 22 23:22:28.229 INFO request completed, latency_us: 349, response_code: 200, uri: /newextent/145/data, method: GET, req_id: 370067db-4190-4d4b-89c5-f2f9970c46cf, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51157 Sep 22 23:22:28.233 DEBG IO Read 1095 has deps [JobId(1094)]
51158 Sep 22 23:22:28.233 DEBG [rc] retire 1092 clears [JobId(1091), JobId(1092)], : downstairs
51159 Sep 22 23:22:28.234 TRCE incoming request, uri: /newextent/145/db, method: GET, req_id: bba38001-333e-4cdd-8004-746ece71293f, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51160 Sep 22 23:22:28.234 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/145/db, method: GET, req_id: bba38001-333e-4cdd-8004-746ece71293f, remote_addr: 127.0.0.1:34697, local_addr: 127.0.0.1:52864, task: repair
51161 Sep 22 23:22:28.235 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/091.copy" to "/tmp/downstairs-zrMnlo6G/00/000/091.replace"
51162 Sep 22 23:22:28.235 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51163 Sep 22 23:22:28.236 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/091.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51164 Sep 22 23:22:28.236 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/091"
51165 Sep 22 23:22:28.236 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/091.db"
51166 Sep 22 23:22:28.236 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51167 Sep 22 23:22:28.236 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/091.replace" to "/tmp/downstairs-zrMnlo6G/00/000/091.completed"
51168 Sep 22 23:22:28.236 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51169 Sep 22 23:22:28.237 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51170 Sep 22 23:22:28.237 DEBG [0] It's time to notify for 718
51171 Sep 22 23:22:28.237 INFO Completion from [0] id:718 status:true
51172 Sep 22 23:22:28.237 INFO [719/752] Repair commands completed
51173 Sep 22 23:22:28.237 INFO Pop front: ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }, state: ClientData([New, New, New]) }
51174 Sep 22 23:22:28.237 INFO Sent repair work, now wait for resp
51175 Sep 22 23:22:28.237 INFO [0] received reconcile message
51176 Sep 22 23:22:28.237 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }, state: ClientData([InProgress, New, New]) }, : downstairs
51177 Sep 22 23:22:28.237 INFO [0] client ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }
51178 Sep 22 23:22:28.237 INFO [1] received reconcile message
51179 Sep 22 23:22:28.237 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51180 Sep 22 23:22:28.237 INFO [1] client ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }
51181 Sep 22 23:22:28.237 INFO [2] received reconcile message
51182 Sep 22 23:22:28.237 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51183 Sep 22 23:22:28.237 INFO [2] client ExtentReopen { repair_id: ReconciliationId(719), extent_id: 145 }
51184 Sep 22 23:22:28.237 DEBG 719 Reopen extent 145
51185 Sep 22 23:22:28.238 DEBG 719 Reopen extent 145
51186 Sep 22 23:22:28.239 DEBG 719 Reopen extent 145
51187 Sep 22 23:22:28.239 DEBG [2] It's time to notify for 719
51188 Sep 22 23:22:28.239 INFO Completion from [2] id:719 status:true
51189 Sep 22 23:22:28.239 INFO [720/752] Repair commands completed
51190 Sep 22 23:22:28.239 INFO Pop front: ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51191 Sep 22 23:22:28.239 INFO Sent repair work, now wait for resp
51192 Sep 22 23:22:28.239 INFO [0] received reconcile message
51193 Sep 22 23:22:28.239 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51194 Sep 22 23:22:28.239 INFO [0] client ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51195 Sep 22 23:22:28.239 INFO [1] received reconcile message
51196 Sep 22 23:22:28.239 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51197 Sep 22 23:22:28.239 INFO [1] client ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51198 Sep 22 23:22:28.239 INFO [2] received reconcile message
51199 Sep 22 23:22:28.239 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51200 Sep 22 23:22:28.239 INFO [2] client ExtentFlush { repair_id: ReconciliationId(720), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51201 Sep 22 23:22:28.240 DEBG 720 Flush extent 131 with f:2 g:2
51202 Sep 22 23:22:28.240 DEBG Flush just extent 131 with f:2 and g:2
51203 Sep 22 23:22:28.240 DEBG [1] It's time to notify for 720
51204 Sep 22 23:22:28.240 INFO Completion from [1] id:720 status:true
51205 Sep 22 23:22:28.240 INFO [721/752] Repair commands completed
51206 Sep 22 23:22:28.240 INFO Pop front: ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }, state: ClientData([New, New, New]) }
51207 Sep 22 23:22:28.240 INFO Sent repair work, now wait for resp
51208 Sep 22 23:22:28.240 INFO [0] received reconcile message
51209 Sep 22 23:22:28.240 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }, state: ClientData([InProgress, New, New]) }, : downstairs
51210 Sep 22 23:22:28.240 INFO [0] client ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }
51211 Sep 22 23:22:28.240 INFO [1] received reconcile message
51212 Sep 22 23:22:28.240 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51213 Sep 22 23:22:28.240 INFO [1] client ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }
51214 Sep 22 23:22:28.240 INFO [2] received reconcile message
51215 Sep 22 23:22:28.240 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51216 Sep 22 23:22:28.240 INFO [2] client ExtentClose { repair_id: ReconciliationId(721), extent_id: 131 }
51217 Sep 22 23:22:28.240 DEBG 721 Close extent 131
51218 Sep 22 23:22:28.240 DEBG 721 Close extent 131
51219 Sep 22 23:22:28.241 DEBG 721 Close extent 131
51220 Sep 22 23:22:28.241 DEBG [2] It's time to notify for 721
51221 Sep 22 23:22:28.241 INFO Completion from [2] id:721 status:true
51222 Sep 22 23:22:28.241 INFO [722/752] Repair commands completed
51223 Sep 22 23:22:28.241 INFO Pop front: ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51224 Sep 22 23:22:28.241 INFO Sent repair work, now wait for resp
51225 Sep 22 23:22:28.241 INFO [0] received reconcile message
51226 Sep 22 23:22:28.241 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51227 Sep 22 23:22:28.241 INFO [0] client ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51228 Sep 22 23:22:28.241 INFO [0] Sending repair request ReconciliationId(722)
51229 Sep 22 23:22:28.241 INFO [1] received reconcile message
51230 Sep 22 23:22:28.241 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51231 Sep 22 23:22:28.241 INFO [1] client ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51232 Sep 22 23:22:28.241 INFO [1] No action required ReconciliationId(722)
51233 Sep 22 23:22:28.242 INFO [2] received reconcile message
51234 Sep 22 23:22:28.242 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51235 Sep 22 23:22:28.242 INFO [2] client ExtentRepair { repair_id: ReconciliationId(722), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51236 Sep 22 23:22:28.242 INFO [2] No action required ReconciliationId(722)
51237 Sep 22 23:22:28.242 DEBG 722 Repair extent 131 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51238 Sep 22 23:22:28.242 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/083.copy"
51239 Sep 22 23:22:28.248 WARN returning error on flush!
51240 Sep 22 23:22:28.248 DEBG Flush :1094 extent_limit None deps:[JobId(1093), JobId(1092)] res:false f:36 g:1
51241 Sep 22 23:22:28.248 DEBG Flush :1094 extent_limit None deps:[JobId(1093), JobId(1092)] res:true f:36 g:1
51242 Sep 22 23:22:28.253 DEBG Read :1095 deps:[JobId(1094)] res:true
51243 Sep 22 23:22:28.304 INFO accepted connection, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51244 Sep 22 23:22:28.304 TRCE incoming request, uri: /extent/131/files, method: GET, req_id: 3ffacf74-8166-47f0-98fa-a83792402b1c, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51245 Sep 22 23:22:28.304 INFO request completed, latency_us: 225, response_code: 200, uri: /extent/131/files, method: GET, req_id: 3ffacf74-8166-47f0-98fa-a83792402b1c, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51246 Sep 22 23:22:28.305 INFO eid:131 Found repair files: ["083", "083.db"]
51247 Sep 22 23:22:28.305 TRCE incoming request, uri: /newextent/131/data, method: GET, req_id: 89149804-e68d-43f9-a76d-c96f7af34546, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51248 Sep 22 23:22:28.305 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/131/data, method: GET, req_id: 89149804-e68d-43f9-a76d-c96f7af34546, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51249 Sep 22 23:22:28.310 TRCE incoming request, uri: /newextent/131/db, method: GET, req_id: a7da5631-e13e-43a7-a454-6f3e4a62d8a6, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51250 Sep 22 23:22:28.310 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/131/db, method: GET, req_id: a7da5631-e13e-43a7-a454-6f3e4a62d8a6, remote_addr: 127.0.0.1:46641, local_addr: 127.0.0.1:52864, task: repair
51251 Sep 22 23:22:28.311 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/083.copy" to "/tmp/downstairs-zrMnlo6G/00/000/083.replace"
51252 Sep 22 23:22:28.312 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51253 Sep 22 23:22:28.312 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/083.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51254 Sep 22 23:22:28.313 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/083"
51255 Sep 22 23:22:28.313 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/083.db"
51256 Sep 22 23:22:28.313 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51257 Sep 22 23:22:28.313 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/083.replace" to "/tmp/downstairs-zrMnlo6G/00/000/083.completed"
51258 Sep 22 23:22:28.313 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51259 Sep 22 23:22:28.313 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51260 Sep 22 23:22:28.313 DEBG [0] It's time to notify for 722
51261 Sep 22 23:22:28.313 INFO Completion from [0] id:722 status:true
51262 Sep 22 23:22:28.313 INFO [723/752] Repair commands completed
51263 Sep 22 23:22:28.313 INFO Pop front: ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }, state: ClientData([New, New, New]) }
51264 Sep 22 23:22:28.313 INFO Sent repair work, now wait for resp
51265 Sep 22 23:22:28.313 INFO [0] received reconcile message
51266 Sep 22 23:22:28.313 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }, state: ClientData([InProgress, New, New]) }, : downstairs
51267 Sep 22 23:22:28.313 INFO [0] client ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }
51268 Sep 22 23:22:28.313 INFO [1] received reconcile message
51269 Sep 22 23:22:28.313 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51270 Sep 22 23:22:28.314 INFO [1] client ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }
51271 Sep 22 23:22:28.314 INFO [2] received reconcile message
51272 Sep 22 23:22:28.314 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51273 Sep 22 23:22:28.314 INFO [2] client ExtentReopen { repair_id: ReconciliationId(723), extent_id: 131 }
51274 Sep 22 23:22:28.314 DEBG 723 Reopen extent 131
51275 Sep 22 23:22:28.314 DEBG 723 Reopen extent 131
51276 Sep 22 23:22:28.315 DEBG 723 Reopen extent 131
51277 Sep 22 23:22:28.316 DEBG [2] It's time to notify for 723
51278 Sep 22 23:22:28.316 INFO Completion from [2] id:723 status:true
51279 Sep 22 23:22:28.316 INFO [724/752] Repair commands completed
51280 Sep 22 23:22:28.316 INFO Pop front: ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51281 Sep 22 23:22:28.316 INFO Sent repair work, now wait for resp
51282 Sep 22 23:22:28.316 INFO [0] received reconcile message
51283 Sep 22 23:22:28.316 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51284 Sep 22 23:22:28.316 INFO [0] client ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51285 Sep 22 23:22:28.316 INFO [1] received reconcile message
51286 Sep 22 23:22:28.316 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51287 Sep 22 23:22:28.316 INFO [1] client ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51288 Sep 22 23:22:28.316 INFO [2] received reconcile message
51289 Sep 22 23:22:28.316 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51290 Sep 22 23:22:28.316 INFO [2] client ExtentFlush { repair_id: ReconciliationId(724), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51291 Sep 22 23:22:28.316 DEBG 724 Flush extent 170 with f:2 g:2
51292 Sep 22 23:22:28.316 DEBG Flush just extent 170 with f:2 and g:2
51293 Sep 22 23:22:28.316 DEBG [1] It's time to notify for 724
51294 Sep 22 23:22:28.316 INFO Completion from [1] id:724 status:true
51295 Sep 22 23:22:28.316 INFO [725/752] Repair commands completed
51296 Sep 22 23:22:28.316 INFO Pop front: ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }, state: ClientData([New, New, New]) }
51297 Sep 22 23:22:28.316 INFO Sent repair work, now wait for resp
51298 Sep 22 23:22:28.316 INFO [0] received reconcile message
51299 Sep 22 23:22:28.316 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }, state: ClientData([InProgress, New, New]) }, : downstairs
51300 Sep 22 23:22:28.316 INFO [0] client ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }
51301 Sep 22 23:22:28.316 INFO [1] received reconcile message
51302 Sep 22 23:22:28.316 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51303 Sep 22 23:22:28.316 INFO [1] client ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }
51304 Sep 22 23:22:28.316 INFO [2] received reconcile message
51305 Sep 22 23:22:28.316 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51306 Sep 22 23:22:28.316 INFO [2] client ExtentClose { repair_id: ReconciliationId(725), extent_id: 170 }
51307 Sep 22 23:22:28.317 DEBG 725 Close extent 170
51308 Sep 22 23:22:28.317 DEBG 725 Close extent 170
51309 Sep 22 23:22:28.317 DEBG 725 Close extent 170
51310 Sep 22 23:22:28.318 DEBG [2] It's time to notify for 725
51311 Sep 22 23:22:28.318 INFO Completion from [2] id:725 status:true
51312 Sep 22 23:22:28.318 INFO [726/752] Repair commands completed
51313 Sep 22 23:22:28.318 INFO Pop front: ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51314 Sep 22 23:22:28.318 INFO Sent repair work, now wait for resp
51315 Sep 22 23:22:28.318 INFO [0] received reconcile message
51316 Sep 22 23:22:28.318 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51317 Sep 22 23:22:28.318 INFO [0] client ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51318 Sep 22 23:22:28.318 INFO [0] Sending repair request ReconciliationId(726)
51319 Sep 22 23:22:28.318 INFO [1] received reconcile message
51320 Sep 22 23:22:28.318 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51321 Sep 22 23:22:28.318 INFO [1] client ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51322 Sep 22 23:22:28.318 INFO [1] No action required ReconciliationId(726)
51323 Sep 22 23:22:28.318 INFO [2] received reconcile message
51324 Sep 22 23:22:28.318 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51325 Sep 22 23:22:28.318 INFO [2] client ExtentRepair { repair_id: ReconciliationId(726), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51326 Sep 22 23:22:28.318 INFO [2] No action required ReconciliationId(726)
51327 Sep 22 23:22:28.318 DEBG 726 Repair extent 170 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51328 Sep 22 23:22:28.318 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0AA.copy"
51329 Sep 22 23:22:28.384 INFO accepted connection, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51330 Sep 22 23:22:28.384 TRCE incoming request, uri: /extent/170/files, method: GET, req_id: 968623e9-6482-42b6-bf97-f826537761f4, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51331 Sep 22 23:22:28.385 INFO request completed, latency_us: 234, response_code: 200, uri: /extent/170/files, method: GET, req_id: 968623e9-6482-42b6-bf97-f826537761f4, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51332 Sep 22 23:22:28.385 INFO eid:170 Found repair files: ["0AA", "0AA.db"]
51333 Sep 22 23:22:28.385 TRCE incoming request, uri: /newextent/170/data, method: GET, req_id: 36d0443d-aab3-4ac4-a914-1ac7588303f3, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51334 Sep 22 23:22:28.386 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/170/data, method: GET, req_id: 36d0443d-aab3-4ac4-a914-1ac7588303f3, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51335 Sep 22 23:22:28.390 TRCE incoming request, uri: /newextent/170/db, method: GET, req_id: 51129285-e60f-4194-8722-c84978748b65, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51336 Sep 22 23:22:28.391 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/170/db, method: GET, req_id: 51129285-e60f-4194-8722-c84978748b65, remote_addr: 127.0.0.1:40005, local_addr: 127.0.0.1:52864, task: repair
51337 Sep 22 23:22:28.392 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0AA.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0AA.replace"
51338 Sep 22 23:22:28.392 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51339 Sep 22 23:22:28.393 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0AA.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51340 Sep 22 23:22:28.393 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AA"
51341 Sep 22 23:22:28.393 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0AA.db"
51342 Sep 22 23:22:28.393 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51343 Sep 22 23:22:28.393 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0AA.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0AA.completed"
51344 Sep 22 23:22:28.393 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51345 Sep 22 23:22:28.393 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51346 Sep 22 23:22:28.393 DEBG [0] It's time to notify for 726
51347 Sep 22 23:22:28.393 INFO Completion from [0] id:726 status:true
51348 Sep 22 23:22:28.393 INFO [727/752] Repair commands completed
51349 Sep 22 23:22:28.393 INFO Pop front: ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }, state: ClientData([New, New, New]) }
51350 Sep 22 23:22:28.393 INFO Sent repair work, now wait for resp
51351 Sep 22 23:22:28.394 INFO [0] received reconcile message
51352 Sep 22 23:22:28.394 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }, state: ClientData([InProgress, New, New]) }, : downstairs
51353 Sep 22 23:22:28.394 INFO [0] client ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }
51354 Sep 22 23:22:28.394 INFO [1] received reconcile message
51355 Sep 22 23:22:28.394 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51356 Sep 22 23:22:28.394 INFO [1] client ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }
51357 Sep 22 23:22:28.394 INFO [2] received reconcile message
51358 Sep 22 23:22:28.394 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51359 Sep 22 23:22:28.394 INFO [2] client ExtentReopen { repair_id: ReconciliationId(727), extent_id: 170 }
51360 Sep 22 23:22:28.394 DEBG 727 Reopen extent 170
51361 Sep 22 23:22:28.395 DEBG 727 Reopen extent 170
51362 Sep 22 23:22:28.395 DEBG 727 Reopen extent 170
51363 Sep 22 23:22:28.396 DEBG [2] It's time to notify for 727
51364 Sep 22 23:22:28.396 INFO Completion from [2] id:727 status:true
51365 Sep 22 23:22:28.396 INFO [728/752] Repair commands completed
51366 Sep 22 23:22:28.396 INFO Pop front: ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51367 Sep 22 23:22:28.396 INFO Sent repair work, now wait for resp
51368 Sep 22 23:22:28.396 INFO [0] received reconcile message
51369 Sep 22 23:22:28.396 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51370 Sep 22 23:22:28.396 INFO [0] client ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51371 Sep 22 23:22:28.396 INFO [1] received reconcile message
51372 Sep 22 23:22:28.396 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51373 Sep 22 23:22:28.396 INFO [1] client ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51374 Sep 22 23:22:28.396 INFO [2] received reconcile message
51375 Sep 22 23:22:28.396 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51376 Sep 22 23:22:28.396 INFO [2] client ExtentFlush { repair_id: ReconciliationId(728), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51377 Sep 22 23:22:28.396 DEBG 728 Flush extent 72 with f:2 g:2
51378 Sep 22 23:22:28.396 DEBG Flush just extent 72 with f:2 and g:2
51379 Sep 22 23:22:28.396 DEBG [1] It's time to notify for 728
51380 Sep 22 23:22:28.396 INFO Completion from [1] id:728 status:true
51381 Sep 22 23:22:28.396 INFO [729/752] Repair commands completed
51382 Sep 22 23:22:28.396 INFO Pop front: ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }, state: ClientData([New, New, New]) }
51383 Sep 22 23:22:28.396 INFO Sent repair work, now wait for resp
51384 Sep 22 23:22:28.396 INFO [0] received reconcile message
51385 Sep 22 23:22:28.396 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }, state: ClientData([InProgress, New, New]) }, : downstairs
51386 Sep 22 23:22:28.396 INFO [0] client ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }
51387 Sep 22 23:22:28.397 INFO [1] received reconcile message
51388 Sep 22 23:22:28.397 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51389 Sep 22 23:22:28.397 INFO [1] client ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }
51390 Sep 22 23:22:28.397 INFO [2] received reconcile message
51391 Sep 22 23:22:28.397 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51392 Sep 22 23:22:28.397 INFO [2] client ExtentClose { repair_id: ReconciliationId(729), extent_id: 72 }
51393 Sep 22 23:22:28.397 DEBG 729 Close extent 72
51394 Sep 22 23:22:28.397 DEBG 729 Close extent 72
51395 Sep 22 23:22:28.397 DEBG 729 Close extent 72
51396 Sep 22 23:22:28.398 DEBG [2] It's time to notify for 729
51397 Sep 22 23:22:28.398 INFO Completion from [2] id:729 status:true
51398 Sep 22 23:22:28.398 INFO [730/752] Repair commands completed
51399 Sep 22 23:22:28.398 INFO Pop front: ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51400 Sep 22 23:22:28.398 INFO Sent repair work, now wait for resp
51401 Sep 22 23:22:28.398 INFO [0] received reconcile message
51402 Sep 22 23:22:28.398 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51403 Sep 22 23:22:28.398 INFO [0] client ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51404 Sep 22 23:22:28.398 INFO [0] Sending repair request ReconciliationId(730)
51405 Sep 22 23:22:28.398 INFO [1] received reconcile message
51406 Sep 22 23:22:28.398 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51407 Sep 22 23:22:28.398 INFO [1] client ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51408 Sep 22 23:22:28.398 INFO [1] No action required ReconciliationId(730)
51409 Sep 22 23:22:28.398 INFO [2] received reconcile message
51410 Sep 22 23:22:28.398 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51411 Sep 22 23:22:28.398 INFO [2] client ExtentRepair { repair_id: ReconciliationId(730), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51412 Sep 22 23:22:28.398 INFO [2] No action required ReconciliationId(730)
51413 Sep 22 23:22:28.398 DEBG 730 Repair extent 72 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51414 Sep 22 23:22:28.398 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/048.copy"
51415 Sep 22 23:22:28.464 INFO accepted connection, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51416 Sep 22 23:22:28.465 TRCE incoming request, uri: /extent/72/files, method: GET, req_id: 5e218426-d861-4c71-a05d-fce3b9e901fe, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51417 Sep 22 23:22:28.465 INFO request completed, latency_us: 245, response_code: 200, uri: /extent/72/files, method: GET, req_id: 5e218426-d861-4c71-a05d-fce3b9e901fe, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51418 Sep 22 23:22:28.465 INFO eid:72 Found repair files: ["048", "048.db"]
51419 Sep 22 23:22:28.466 TRCE incoming request, uri: /newextent/72/data, method: GET, req_id: 4c86aad1-8539-4fb3-8971-494813115ec5, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51420 Sep 22 23:22:28.466 INFO request completed, latency_us: 273, response_code: 200, uri: /newextent/72/data, method: GET, req_id: 4c86aad1-8539-4fb3-8971-494813115ec5, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51421 Sep 22 23:22:28.471 TRCE incoming request, uri: /newextent/72/db, method: GET, req_id: 3e190832-c8ec-4c54-b936-06fdc1a5249f, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51422 Sep 22 23:22:28.471 INFO request completed, latency_us: 305, response_code: 200, uri: /newextent/72/db, method: GET, req_id: 3e190832-c8ec-4c54-b936-06fdc1a5249f, remote_addr: 127.0.0.1:45975, local_addr: 127.0.0.1:52864, task: repair
51423 Sep 22 23:22:28.472 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/048.copy" to "/tmp/downstairs-zrMnlo6G/00/000/048.replace"
51424 Sep 22 23:22:28.472 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51425 Sep 22 23:22:28.473 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/048.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51426 Sep 22 23:22:28.473 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/048"
51427 Sep 22 23:22:28.473 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/048.db"
51428 Sep 22 23:22:28.474 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51429 Sep 22 23:22:28.474 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/048.replace" to "/tmp/downstairs-zrMnlo6G/00/000/048.completed"
51430 Sep 22 23:22:28.474 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51431 Sep 22 23:22:28.474 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51432 Sep 22 23:22:28.474 DEBG [0] It's time to notify for 730
51433 Sep 22 23:22:28.474 INFO Completion from [0] id:730 status:true
51434 Sep 22 23:22:28.474 INFO [731/752] Repair commands completed
51435 Sep 22 23:22:28.474 INFO Pop front: ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }, state: ClientData([New, New, New]) }
51436 Sep 22 23:22:28.474 INFO Sent repair work, now wait for resp
51437 Sep 22 23:22:28.474 INFO [0] received reconcile message
51438 Sep 22 23:22:28.474 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }, state: ClientData([InProgress, New, New]) }, : downstairs
51439 Sep 22 23:22:28.474 INFO [0] client ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }
51440 Sep 22 23:22:28.474 INFO [1] received reconcile message
51441 Sep 22 23:22:28.474 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51442 Sep 22 23:22:28.474 INFO [1] client ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }
51443 Sep 22 23:22:28.474 INFO [2] received reconcile message
51444 Sep 22 23:22:28.474 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51445 Sep 22 23:22:28.474 INFO [2] client ExtentReopen { repair_id: ReconciliationId(731), extent_id: 72 }
51446 Sep 22 23:22:28.474 DEBG 731 Reopen extent 72
51447 Sep 22 23:22:28.475 DEBG 731 Reopen extent 72
51448 Sep 22 23:22:28.476 DEBG 731 Reopen extent 72
51449 Sep 22 23:22:28.476 DEBG [2] It's time to notify for 731
51450 Sep 22 23:22:28.476 INFO Completion from [2] id:731 status:true
51451 Sep 22 23:22:28.476 INFO [732/752] Repair commands completed
51452 Sep 22 23:22:28.476 INFO Pop front: ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51453 Sep 22 23:22:28.476 INFO Sent repair work, now wait for resp
51454 Sep 22 23:22:28.477 INFO [0] received reconcile message
51455 Sep 22 23:22:28.477 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51456 Sep 22 23:22:28.477 INFO [0] client ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51457 Sep 22 23:22:28.477 INFO [1] received reconcile message
51458 Sep 22 23:22:28.477 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51459 Sep 22 23:22:28.477 INFO [1] client ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51460 Sep 22 23:22:28.477 INFO [2] received reconcile message
51461 Sep 22 23:22:28.477 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51462 Sep 22 23:22:28.477 INFO [2] client ExtentFlush { repair_id: ReconciliationId(732), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51463 Sep 22 23:22:28.477 DEBG 732 Flush extent 180 with f:2 g:2
51464 Sep 22 23:22:28.477 DEBG Flush just extent 180 with f:2 and g:2
51465 Sep 22 23:22:28.477 DEBG [1] It's time to notify for 732
51466 Sep 22 23:22:28.477 INFO Completion from [1] id:732 status:true
51467 Sep 22 23:22:28.477 INFO [733/752] Repair commands completed
51468 Sep 22 23:22:28.477 INFO Pop front: ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }, state: ClientData([New, New, New]) }
51469 Sep 22 23:22:28.477 INFO Sent repair work, now wait for resp
51470 Sep 22 23:22:28.477 INFO [0] received reconcile message
51471 Sep 22 23:22:28.477 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }, state: ClientData([InProgress, New, New]) }, : downstairs
51472 Sep 22 23:22:28.477 INFO [0] client ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }
51473 Sep 22 23:22:28.477 INFO [1] received reconcile message
51474 Sep 22 23:22:28.477 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51475 Sep 22 23:22:28.477 INFO [1] client ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }
51476 Sep 22 23:22:28.477 INFO [2] received reconcile message
51477 Sep 22 23:22:28.477 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51478 Sep 22 23:22:28.477 INFO [2] client ExtentClose { repair_id: ReconciliationId(733), extent_id: 180 }
51479 Sep 22 23:22:28.477 DEBG 733 Close extent 180
51480 Sep 22 23:22:28.478 DEBG 733 Close extent 180
51481 Sep 22 23:22:28.478 DEBG 733 Close extent 180
51482 Sep 22 23:22:28.478 DEBG [2] It's time to notify for 733
51483 Sep 22 23:22:28.478 INFO Completion from [2] id:733 status:true
51484 Sep 22 23:22:28.478 INFO [734/752] Repair commands completed
51485 Sep 22 23:22:28.478 INFO Pop front: ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51486 Sep 22 23:22:28.479 INFO Sent repair work, now wait for resp
51487 Sep 22 23:22:28.479 INFO [0] received reconcile message
51488 Sep 22 23:22:28.479 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51489 Sep 22 23:22:28.479 INFO [0] client ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51490 Sep 22 23:22:28.479 INFO [0] Sending repair request ReconciliationId(734)
51491 Sep 22 23:22:28.479 INFO [1] received reconcile message
51492 Sep 22 23:22:28.479 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51493 Sep 22 23:22:28.479 INFO [1] client ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51494 Sep 22 23:22:28.479 INFO [1] No action required ReconciliationId(734)
51495 Sep 22 23:22:28.479 INFO [2] received reconcile message
51496 Sep 22 23:22:28.479 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51497 Sep 22 23:22:28.479 INFO [2] client ExtentRepair { repair_id: ReconciliationId(734), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51498 Sep 22 23:22:28.479 INFO [2] No action required ReconciliationId(734)
51499 Sep 22 23:22:28.479 DEBG 734 Repair extent 180 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51500 Sep 22 23:22:28.479 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/0B4.copy"
51501 Sep 22 23:22:28.543 INFO accepted connection, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51502 Sep 22 23:22:28.544 TRCE incoming request, uri: /extent/180/files, method: GET, req_id: 8c035ce6-8000-4998-b2f8-0a72fb2d67e4, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51503 Sep 22 23:22:28.544 INFO request completed, latency_us: 232, response_code: 200, uri: /extent/180/files, method: GET, req_id: 8c035ce6-8000-4998-b2f8-0a72fb2d67e4, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51504 Sep 22 23:22:28.544 INFO eid:180 Found repair files: ["0B4", "0B4.db"]
51505 Sep 22 23:22:28.545 TRCE incoming request, uri: /newextent/180/data, method: GET, req_id: 86be73b5-5838-4eba-a10e-73d46abb33c9, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51506 Sep 22 23:22:28.545 INFO request completed, latency_us: 273, response_code: 200, uri: /newextent/180/data, method: GET, req_id: 86be73b5-5838-4eba-a10e-73d46abb33c9, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51507 Sep 22 23:22:28.550 TRCE incoming request, uri: /newextent/180/db, method: GET, req_id: a040e66d-c668-4bef-a3a2-19a7c98faf8c, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51508 Sep 22 23:22:28.550 INFO request completed, latency_us: 312, response_code: 200, uri: /newextent/180/db, method: GET, req_id: a040e66d-c668-4bef-a3a2-19a7c98faf8c, remote_addr: 127.0.0.1:46864, local_addr: 127.0.0.1:52864, task: repair
51509 Sep 22 23:22:28.551 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/0B4.copy" to "/tmp/downstairs-zrMnlo6G/00/000/0B4.replace"
51510 Sep 22 23:22:28.551 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51511 Sep 22 23:22:28.552 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/0B4.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51512 Sep 22 23:22:28.553 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B4"
51513 Sep 22 23:22:28.553 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/0B4.db"
51514 Sep 22 23:22:28.553 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51515 Sep 22 23:22:28.553 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/0B4.replace" to "/tmp/downstairs-zrMnlo6G/00/000/0B4.completed"
51516 Sep 22 23:22:28.553 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51517 Sep 22 23:22:28.553 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51518 Sep 22 23:22:28.553 DEBG [0] It's time to notify for 734
51519 Sep 22 23:22:28.553 INFO Completion from [0] id:734 status:true
51520 Sep 22 23:22:28.553 INFO [735/752] Repair commands completed
51521 Sep 22 23:22:28.553 INFO Pop front: ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }, state: ClientData([New, New, New]) }
51522 Sep 22 23:22:28.553 INFO Sent repair work, now wait for resp
51523 Sep 22 23:22:28.553 INFO [0] received reconcile message
51524 Sep 22 23:22:28.553 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }, state: ClientData([InProgress, New, New]) }, : downstairs
51525 Sep 22 23:22:28.553 INFO [0] client ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }
51526 Sep 22 23:22:28.554 INFO [1] received reconcile message
51527 Sep 22 23:22:28.554 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51528 Sep 22 23:22:28.554 INFO [1] client ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }
51529 Sep 22 23:22:28.554 INFO [2] received reconcile message
51530 Sep 22 23:22:28.554 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51531 Sep 22 23:22:28.554 INFO [2] client ExtentReopen { repair_id: ReconciliationId(735), extent_id: 180 }
51532 Sep 22 23:22:28.554 DEBG 735 Reopen extent 180
51533 Sep 22 23:22:28.555 DEBG 735 Reopen extent 180
51534 Sep 22 23:22:28.555 DEBG 735 Reopen extent 180
51535 Sep 22 23:22:28.556 DEBG [2] It's time to notify for 735
51536 Sep 22 23:22:28.556 INFO Completion from [2] id:735 status:true
51537 Sep 22 23:22:28.556 INFO [736/752] Repair commands completed
51538 Sep 22 23:22:28.556 INFO Pop front: ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51539 Sep 22 23:22:28.556 INFO Sent repair work, now wait for resp
51540 Sep 22 23:22:28.556 INFO [0] received reconcile message
51541 Sep 22 23:22:28.556 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51542 Sep 22 23:22:28.556 INFO [0] client ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51543 Sep 22 23:22:28.556 INFO [1] received reconcile message
51544 Sep 22 23:22:28.556 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51545 Sep 22 23:22:28.556 INFO [1] client ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51546 Sep 22 23:22:28.556 INFO [2] received reconcile message
51547 Sep 22 23:22:28.556 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51548 Sep 22 23:22:28.556 INFO [2] client ExtentFlush { repair_id: ReconciliationId(736), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51549 Sep 22 23:22:28.556 DEBG 736 Flush extent 57 with f:2 g:2
51550 Sep 22 23:22:28.556 DEBG Flush just extent 57 with f:2 and g:2
51551 Sep 22 23:22:28.556 DEBG [1] It's time to notify for 736
51552 Sep 22 23:22:28.556 INFO Completion from [1] id:736 status:true
51553 Sep 22 23:22:28.556 INFO [737/752] Repair commands completed
51554 Sep 22 23:22:28.556 INFO Pop front: ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }, state: ClientData([New, New, New]) }
51555 Sep 22 23:22:28.556 INFO Sent repair work, now wait for resp
51556 Sep 22 23:22:28.556 INFO [0] received reconcile message
51557 Sep 22 23:22:28.556 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }, state: ClientData([InProgress, New, New]) }, : downstairs
51558 Sep 22 23:22:28.556 INFO [0] client ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }
51559 Sep 22 23:22:28.556 INFO [1] received reconcile message
51560 Sep 22 23:22:28.556 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51561 Sep 22 23:22:28.557 INFO [1] client ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }
51562 Sep 22 23:22:28.557 INFO [2] received reconcile message
51563 Sep 22 23:22:28.557 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51564 Sep 22 23:22:28.557 INFO [2] client ExtentClose { repair_id: ReconciliationId(737), extent_id: 57 }
51565 Sep 22 23:22:28.557 DEBG 737 Close extent 57
51566 Sep 22 23:22:28.557 DEBG 737 Close extent 57
51567 Sep 22 23:22:28.557 DEBG 737 Close extent 57
51568 Sep 22 23:22:28.558 DEBG [2] It's time to notify for 737
51569 Sep 22 23:22:28.558 INFO Completion from [2] id:737 status:true
51570 Sep 22 23:22:28.558 INFO [738/752] Repair commands completed
51571 Sep 22 23:22:28.558 INFO Pop front: ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51572 Sep 22 23:22:28.558 INFO Sent repair work, now wait for resp
51573 Sep 22 23:22:28.558 INFO [0] received reconcile message
51574 Sep 22 23:22:28.558 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51575 Sep 22 23:22:28.558 INFO [0] client ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51576 Sep 22 23:22:28.558 INFO [0] Sending repair request ReconciliationId(738)
51577 Sep 22 23:22:28.558 INFO [1] received reconcile message
51578 Sep 22 23:22:28.558 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51579 Sep 22 23:22:28.558 INFO [1] client ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51580 Sep 22 23:22:28.558 INFO [1] No action required ReconciliationId(738)
51581 Sep 22 23:22:28.558 INFO [2] received reconcile message
51582 Sep 22 23:22:28.558 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51583 Sep 22 23:22:28.558 INFO [2] client ExtentRepair { repair_id: ReconciliationId(738), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51584 Sep 22 23:22:28.558 INFO [2] No action required ReconciliationId(738)
51585 Sep 22 23:22:28.558 DEBG 738 Repair extent 57 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51586 Sep 22 23:22:28.558 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/039.copy"
51587 Sep 22 23:22:28.621 INFO accepted connection, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51588 Sep 22 23:22:28.622 TRCE incoming request, uri: /extent/57/files, method: GET, req_id: 456a0d50-d134-4fc8-b9e9-ee8e794a4c55, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51589 Sep 22 23:22:28.622 INFO request completed, latency_us: 264, response_code: 200, uri: /extent/57/files, method: GET, req_id: 456a0d50-d134-4fc8-b9e9-ee8e794a4c55, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51590 Sep 22 23:22:28.622 INFO eid:57 Found repair files: ["039", "039.db"]
51591 Sep 22 23:22:28.623 TRCE incoming request, uri: /newextent/57/data, method: GET, req_id: 721154bf-650c-48f8-931a-17a52b9a4dd1, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51592 Sep 22 23:22:28.623 INFO request completed, latency_us: 341, response_code: 200, uri: /newextent/57/data, method: GET, req_id: 721154bf-650c-48f8-931a-17a52b9a4dd1, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51593 Sep 22 23:22:28.628 TRCE incoming request, uri: /newextent/57/db, method: GET, req_id: 09fc977b-0d3b-4790-abf8-83c03db218b6, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51594 Sep 22 23:22:28.628 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/57/db, method: GET, req_id: 09fc977b-0d3b-4790-abf8-83c03db218b6, remote_addr: 127.0.0.1:37146, local_addr: 127.0.0.1:52864, task: repair
51595 Sep 22 23:22:28.629 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/039.copy" to "/tmp/downstairs-zrMnlo6G/00/000/039.replace"
51596 Sep 22 23:22:28.629 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51597 Sep 22 23:22:28.631 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/039.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51598 Sep 22 23:22:28.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/039"
51599 Sep 22 23:22:28.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/039.db"
51600 Sep 22 23:22:28.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51601 Sep 22 23:22:28.631 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/039.replace" to "/tmp/downstairs-zrMnlo6G/00/000/039.completed"
51602 Sep 22 23:22:28.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51603 Sep 22 23:22:28.631 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51604 Sep 22 23:22:28.631 DEBG [0] It's time to notify for 738
51605 Sep 22 23:22:28.631 INFO Completion from [0] id:738 status:true
51606 Sep 22 23:22:28.631 INFO [739/752] Repair commands completed
51607 Sep 22 23:22:28.631 INFO Pop front: ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }, state: ClientData([New, New, New]) }
51608 Sep 22 23:22:28.631 INFO Sent repair work, now wait for resp
51609 Sep 22 23:22:28.632 INFO [0] received reconcile message
51610 Sep 22 23:22:28.632 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }, state: ClientData([InProgress, New, New]) }, : downstairs
51611 Sep 22 23:22:28.632 INFO [0] client ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }
51612 Sep 22 23:22:28.632 INFO [1] received reconcile message
51613 Sep 22 23:22:28.632 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51614 Sep 22 23:22:28.632 INFO [1] client ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }
51615 Sep 22 23:22:28.632 INFO [2] received reconcile message
51616 Sep 22 23:22:28.632 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51617 Sep 22 23:22:28.632 INFO [2] client ExtentReopen { repair_id: ReconciliationId(739), extent_id: 57 }
51618 Sep 22 23:22:28.632 DEBG 739 Reopen extent 57
51619 Sep 22 23:22:28.633 DEBG 739 Reopen extent 57
51620 Sep 22 23:22:28.633 DEBG 739 Reopen extent 57
51621 Sep 22 23:22:28.634 DEBG [2] It's time to notify for 739
51622 Sep 22 23:22:28.634 INFO Completion from [2] id:739 status:true
51623 Sep 22 23:22:28.634 INFO [740/752] Repair commands completed
51624 Sep 22 23:22:28.634 INFO Pop front: ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51625 Sep 22 23:22:28.634 INFO Sent repair work, now wait for resp
51626 Sep 22 23:22:28.634 INFO [0] received reconcile message
51627 Sep 22 23:22:28.634 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51628 Sep 22 23:22:28.634 INFO [0] client ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51629 Sep 22 23:22:28.634 INFO [1] received reconcile message
51630 Sep 22 23:22:28.634 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51631 Sep 22 23:22:28.634 INFO [1] client ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51632 Sep 22 23:22:28.634 INFO [2] received reconcile message
51633 Sep 22 23:22:28.634 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51634 Sep 22 23:22:28.634 INFO [2] client ExtentFlush { repair_id: ReconciliationId(740), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51635 Sep 22 23:22:28.634 DEBG 740 Flush extent 14 with f:2 g:2
51636 Sep 22 23:22:28.634 DEBG Flush just extent 14 with f:2 and g:2
51637 Sep 22 23:22:28.634 DEBG [1] It's time to notify for 740
51638 Sep 22 23:22:28.634 INFO Completion from [1] id:740 status:true
51639 Sep 22 23:22:28.634 INFO [741/752] Repair commands completed
51640 Sep 22 23:22:28.634 INFO Pop front: ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }, state: ClientData([New, New, New]) }
51641 Sep 22 23:22:28.635 INFO Sent repair work, now wait for resp
51642 Sep 22 23:22:28.635 INFO [0] received reconcile message
51643 Sep 22 23:22:28.635 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }, state: ClientData([InProgress, New, New]) }, : downstairs
51644 Sep 22 23:22:28.635 INFO [0] client ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }
51645 Sep 22 23:22:28.635 INFO [1] received reconcile message
51646 Sep 22 23:22:28.635 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51647 Sep 22 23:22:28.635 INFO [1] client ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }
51648 Sep 22 23:22:28.635 INFO [2] received reconcile message
51649 Sep 22 23:22:28.635 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51650 Sep 22 23:22:28.635 INFO [2] client ExtentClose { repair_id: ReconciliationId(741), extent_id: 14 }
51651 Sep 22 23:22:28.635 DEBG 741 Close extent 14
51652 Sep 22 23:22:28.635 DEBG 741 Close extent 14
51653 Sep 22 23:22:28.635 DEBG 741 Close extent 14
51654 Sep 22 23:22:28.636 DEBG [2] It's time to notify for 741
51655 Sep 22 23:22:28.636 INFO Completion from [2] id:741 status:true
51656 Sep 22 23:22:28.636 INFO [742/752] Repair commands completed
51657 Sep 22 23:22:28.636 INFO Pop front: ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51658 Sep 22 23:22:28.636 INFO Sent repair work, now wait for resp
51659 Sep 22 23:22:28.636 INFO [0] received reconcile message
51660 Sep 22 23:22:28.636 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51661 Sep 22 23:22:28.636 INFO [0] client ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51662 Sep 22 23:22:28.636 INFO [0] Sending repair request ReconciliationId(742)
51663 Sep 22 23:22:28.636 INFO [1] received reconcile message
51664 Sep 22 23:22:28.636 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51665 Sep 22 23:22:28.636 INFO [1] client ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51666 Sep 22 23:22:28.636 INFO [1] No action required ReconciliationId(742)
51667 Sep 22 23:22:28.636 INFO [2] received reconcile message
51668 Sep 22 23:22:28.636 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51669 Sep 22 23:22:28.636 INFO [2] client ExtentRepair { repair_id: ReconciliationId(742), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51670 Sep 22 23:22:28.636 INFO [2] No action required ReconciliationId(742)
51671 Sep 22 23:22:28.636 DEBG 742 Repair extent 14 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51672 Sep 22 23:22:28.636 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/00E.copy"
51673 Sep 22 23:22:28.657 ERRO [2] job id 1094 saw error GenericError("test error")
51674 Sep 22 23:22:28.657 DEBG up_ds_listen was notified
51675 Sep 22 23:22:28.657 DEBG up_ds_listen process 1094
51676 Sep 22 23:22:28.657 DEBG [A] ack job 1094:95, : downstairs
51677 Sep 22 23:22:28.657 DEBG up_ds_listen checked 1 jobs, back to waiting
51678 Sep 22 23:22:28.659 DEBG Flush :1094 extent_limit None deps:[JobId(1093), JobId(1092)] res:true f:36 g:1
51679 Sep 22 23:22:28.659 INFO [lossy] sleeping 1 second
51680 Sep 22 23:22:28.659 WARN returning error on read!
51681 Sep 22 23:22:28.659 DEBG Read :1095 deps:[JobId(1094)] res:false
51682 Sep 22 23:22:28.665 DEBG Read :1095 deps:[JobId(1094)] res:true
51683 Sep 22 23:22:28.703 INFO accepted connection, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51684 Sep 22 23:22:28.703 TRCE incoming request, uri: /extent/14/files, method: GET, req_id: 2bed8d8a-06b4-490b-be53-7bf3beafe516, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51685 Sep 22 23:22:28.703 INFO request completed, latency_us: 264, response_code: 200, uri: /extent/14/files, method: GET, req_id: 2bed8d8a-06b4-490b-be53-7bf3beafe516, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51686 Sep 22 23:22:28.703 INFO eid:14 Found repair files: ["00E", "00E.db"]
51687 Sep 22 23:22:28.704 TRCE incoming request, uri: /newextent/14/data, method: GET, req_id: 91137b77-534b-49a7-b8db-12d767dae7bd, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51688 Sep 22 23:22:28.704 INFO request completed, latency_us: 362, response_code: 200, uri: /newextent/14/data, method: GET, req_id: 91137b77-534b-49a7-b8db-12d767dae7bd, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51689 Sep 22 23:22:28.709 TRCE incoming request, uri: /newextent/14/db, method: GET, req_id: 060add22-2817-41a6-b7ae-98bd0e88b945, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51690 Sep 22 23:22:28.709 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/14/db, method: GET, req_id: 060add22-2817-41a6-b7ae-98bd0e88b945, remote_addr: 127.0.0.1:56670, local_addr: 127.0.0.1:52864, task: repair
51691 Sep 22 23:22:28.710 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/00E.copy" to "/tmp/downstairs-zrMnlo6G/00/000/00E.replace"
51692 Sep 22 23:22:28.710 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51693 Sep 22 23:22:28.712 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/00E.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51694 Sep 22 23:22:28.712 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00E"
51695 Sep 22 23:22:28.712 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/00E.db"
51696 Sep 22 23:22:28.712 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51697 Sep 22 23:22:28.712 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/00E.replace" to "/tmp/downstairs-zrMnlo6G/00/000/00E.completed"
51698 Sep 22 23:22:28.712 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51699 Sep 22 23:22:28.712 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51700 Sep 22 23:22:28.712 DEBG [0] It's time to notify for 742
51701 Sep 22 23:22:28.712 INFO Completion from [0] id:742 status:true
51702 Sep 22 23:22:28.712 INFO [743/752] Repair commands completed
51703 Sep 22 23:22:28.712 INFO Pop front: ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }, state: ClientData([New, New, New]) }
51704 Sep 22 23:22:28.712 INFO Sent repair work, now wait for resp
51705 Sep 22 23:22:28.712 INFO [0] received reconcile message
51706 Sep 22 23:22:28.713 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }, state: ClientData([InProgress, New, New]) }, : downstairs
51707 Sep 22 23:22:28.713 INFO [0] client ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }
51708 Sep 22 23:22:28.713 INFO [1] received reconcile message
51709 Sep 22 23:22:28.713 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51710 Sep 22 23:22:28.713 INFO [1] client ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }
51711 Sep 22 23:22:28.713 INFO [2] received reconcile message
51712 Sep 22 23:22:28.713 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51713 Sep 22 23:22:28.713 INFO [2] client ExtentReopen { repair_id: ReconciliationId(743), extent_id: 14 }
51714 Sep 22 23:22:28.713 DEBG 743 Reopen extent 14
51715 Sep 22 23:22:28.714 DEBG 743 Reopen extent 14
51716 Sep 22 23:22:28.714 DEBG 743 Reopen extent 14
51717 Sep 22 23:22:28.715 DEBG [2] It's time to notify for 743
51718 Sep 22 23:22:28.715 INFO Completion from [2] id:743 status:true
51719 Sep 22 23:22:28.715 INFO [744/752] Repair commands completed
51720 Sep 22 23:22:28.715 INFO Pop front: ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51721 Sep 22 23:22:28.715 INFO Sent repair work, now wait for resp
51722 Sep 22 23:22:28.715 INFO [0] received reconcile message
51723 Sep 22 23:22:28.715 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51724 Sep 22 23:22:28.715 INFO [0] client ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51725 Sep 22 23:22:28.715 INFO [1] received reconcile message
51726 Sep 22 23:22:28.715 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51727 Sep 22 23:22:28.715 INFO [1] client ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51728 Sep 22 23:22:28.715 INFO [2] received reconcile message
51729 Sep 22 23:22:28.715 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51730 Sep 22 23:22:28.715 INFO [2] client ExtentFlush { repair_id: ReconciliationId(744), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51731 Sep 22 23:22:28.715 DEBG 744 Flush extent 8 with f:2 g:2
51732 Sep 22 23:22:28.715 DEBG Flush just extent 8 with f:2 and g:2
51733 Sep 22 23:22:28.715 DEBG [1] It's time to notify for 744
51734 Sep 22 23:22:28.715 INFO Completion from [1] id:744 status:true
51735 Sep 22 23:22:28.715 INFO [745/752] Repair commands completed
51736 Sep 22 23:22:28.715 INFO Pop front: ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }, state: ClientData([New, New, New]) }
51737 Sep 22 23:22:28.715 INFO Sent repair work, now wait for resp
51738 Sep 22 23:22:28.716 INFO [0] received reconcile message
51739 Sep 22 23:22:28.716 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }, state: ClientData([InProgress, New, New]) }, : downstairs
51740 Sep 22 23:22:28.716 INFO [0] client ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }
51741 Sep 22 23:22:28.716 INFO [1] received reconcile message
51742 Sep 22 23:22:28.716 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51743 Sep 22 23:22:28.716 INFO [1] client ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }
51744 Sep 22 23:22:28.716 INFO [2] received reconcile message
51745 Sep 22 23:22:28.716 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51746 Sep 22 23:22:28.716 INFO [2] client ExtentClose { repair_id: ReconciliationId(745), extent_id: 8 }
51747 Sep 22 23:22:28.716 DEBG 745 Close extent 8
51748 Sep 22 23:22:28.716 DEBG 745 Close extent 8
51749 Sep 22 23:22:28.716 DEBG 745 Close extent 8
51750 Sep 22 23:22:28.717 DEBG [2] It's time to notify for 745
51751 Sep 22 23:22:28.717 INFO Completion from [2] id:745 status:true
51752 Sep 22 23:22:28.717 INFO [746/752] Repair commands completed
51753 Sep 22 23:22:28.717 INFO Pop front: ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51754 Sep 22 23:22:28.717 INFO Sent repair work, now wait for resp
51755 Sep 22 23:22:28.717 INFO [0] received reconcile message
51756 Sep 22 23:22:28.717 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51757 Sep 22 23:22:28.717 INFO [0] client ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51758 Sep 22 23:22:28.717 INFO [0] Sending repair request ReconciliationId(746)
51759 Sep 22 23:22:28.717 INFO [1] received reconcile message
51760 Sep 22 23:22:28.717 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51761 Sep 22 23:22:28.717 INFO [1] client ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51762 Sep 22 23:22:28.717 INFO [1] No action required ReconciliationId(746)
51763 Sep 22 23:22:28.717 INFO [2] received reconcile message
51764 Sep 22 23:22:28.717 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51765 Sep 22 23:22:28.717 INFO [2] client ExtentRepair { repair_id: ReconciliationId(746), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51766 Sep 22 23:22:28.717 INFO [2] No action required ReconciliationId(746)
51767 Sep 22 23:22:28.717 DEBG 746 Repair extent 8 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51768 Sep 22 23:22:28.717 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/008.copy"
51769 Sep 22 23:22:28.735 ERRO [0] job id 1095 saw error GenericError("test error")
51770 Sep 22 23:22:28.782 INFO accepted connection, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51771 Sep 22 23:22:28.782 TRCE incoming request, uri: /extent/8/files, method: GET, req_id: 53b51468-eb9d-40ab-b69e-e41875551f61, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51772 Sep 22 23:22:28.783 INFO request completed, latency_us: 233, response_code: 200, uri: /extent/8/files, method: GET, req_id: 53b51468-eb9d-40ab-b69e-e41875551f61, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51773 Sep 22 23:22:28.783 INFO eid:8 Found repair files: ["008", "008.db"]
51774 Sep 22 23:22:28.783 TRCE incoming request, uri: /newextent/8/data, method: GET, req_id: 74dbf164-1dde-492a-80ee-f65c5bf763b3, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51775 Sep 22 23:22:28.784 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/8/data, method: GET, req_id: 74dbf164-1dde-492a-80ee-f65c5bf763b3, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51776 Sep 22 23:22:28.788 TRCE incoming request, uri: /newextent/8/db, method: GET, req_id: cd545e92-1a51-4dd5-8fcf-30e62d6ebb67, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51777 Sep 22 23:22:28.789 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/8/db, method: GET, req_id: cd545e92-1a51-4dd5-8fcf-30e62d6ebb67, remote_addr: 127.0.0.1:39346, local_addr: 127.0.0.1:52864, task: repair
51778 Sep 22 23:22:28.790 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/008.copy" to "/tmp/downstairs-zrMnlo6G/00/000/008.replace"
51779 Sep 22 23:22:28.790 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51780 Sep 22 23:22:28.791 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/008.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51781 Sep 22 23:22:28.791 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/008"
51782 Sep 22 23:22:28.791 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/008.db"
51783 Sep 22 23:22:28.791 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51784 Sep 22 23:22:28.791 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/008.replace" to "/tmp/downstairs-zrMnlo6G/00/000/008.completed"
51785 Sep 22 23:22:28.791 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51786 Sep 22 23:22:28.791 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51787 Sep 22 23:22:28.791 DEBG [0] It's time to notify for 746
51788 Sep 22 23:22:28.792 INFO Completion from [0] id:746 status:true
51789 Sep 22 23:22:28.792 INFO [747/752] Repair commands completed
51790 Sep 22 23:22:28.792 INFO Pop front: ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }, state: ClientData([New, New, New]) }
51791 Sep 22 23:22:28.792 INFO Sent repair work, now wait for resp
51792 Sep 22 23:22:28.792 INFO [0] received reconcile message
51793 Sep 22 23:22:28.792 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }, state: ClientData([InProgress, New, New]) }, : downstairs
51794 Sep 22 23:22:28.792 INFO [0] client ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }
51795 Sep 22 23:22:28.792 INFO [1] received reconcile message
51796 Sep 22 23:22:28.792 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51797 Sep 22 23:22:28.792 INFO [1] client ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }
51798 Sep 22 23:22:28.792 INFO [2] received reconcile message
51799 Sep 22 23:22:28.792 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51800 Sep 22 23:22:28.792 INFO [2] client ExtentReopen { repair_id: ReconciliationId(747), extent_id: 8 }
51801 Sep 22 23:22:28.792 DEBG 747 Reopen extent 8
51802 Sep 22 23:22:28.793 DEBG 747 Reopen extent 8
51803 Sep 22 23:22:28.793 DEBG 747 Reopen extent 8
51804 Sep 22 23:22:28.794 DEBG [2] It's time to notify for 747
51805 Sep 22 23:22:28.794 INFO Completion from [2] id:747 status:true
51806 Sep 22 23:22:28.794 INFO [748/752] Repair commands completed
51807 Sep 22 23:22:28.794 INFO Pop front: ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51808 Sep 22 23:22:28.794 INFO Sent repair work, now wait for resp
51809 Sep 22 23:22:28.794 INFO [0] received reconcile message
51810 Sep 22 23:22:28.794 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51811 Sep 22 23:22:28.794 INFO [0] client ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51812 Sep 22 23:22:28.794 INFO [1] received reconcile message
51813 Sep 22 23:22:28.794 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51814 Sep 22 23:22:28.794 INFO [1] client ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51815 Sep 22 23:22:28.794 INFO [2] received reconcile message
51816 Sep 22 23:22:28.794 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51817 Sep 22 23:22:28.794 INFO [2] client ExtentFlush { repair_id: ReconciliationId(748), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51818 Sep 22 23:22:28.794 DEBG 748 Flush extent 114 with f:2 g:2
51819 Sep 22 23:22:28.794 DEBG Flush just extent 114 with f:2 and g:2
51820 Sep 22 23:22:28.794 DEBG [1] It's time to notify for 748
51821 Sep 22 23:22:28.794 INFO Completion from [1] id:748 status:true
51822 Sep 22 23:22:28.795 INFO [749/752] Repair commands completed
51823 Sep 22 23:22:28.795 INFO Pop front: ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }, state: ClientData([New, New, New]) }
51824 Sep 22 23:22:28.795 INFO Sent repair work, now wait for resp
51825 Sep 22 23:22:28.795 INFO [0] received reconcile message
51826 Sep 22 23:22:28.795 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }, state: ClientData([InProgress, New, New]) }, : downstairs
51827 Sep 22 23:22:28.795 INFO [0] client ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }
51828 Sep 22 23:22:28.795 INFO [1] received reconcile message
51829 Sep 22 23:22:28.795 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51830 Sep 22 23:22:28.795 INFO [1] client ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }
51831 Sep 22 23:22:28.795 INFO [2] received reconcile message
51832 Sep 22 23:22:28.795 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51833 Sep 22 23:22:28.795 INFO [2] client ExtentClose { repair_id: ReconciliationId(749), extent_id: 114 }
51834 Sep 22 23:22:28.795 DEBG 749 Close extent 114
51835 Sep 22 23:22:28.795 DEBG 749 Close extent 114
51836 Sep 22 23:22:28.795 DEBG 749 Close extent 114
51837 Sep 22 23:22:28.796 DEBG [2] It's time to notify for 749
51838 Sep 22 23:22:28.796 INFO Completion from [2] id:749 status:true
51839 Sep 22 23:22:28.796 INFO [750/752] Repair commands completed
51840 Sep 22 23:22:28.796 INFO Pop front: ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51841 Sep 22 23:22:28.796 INFO Sent repair work, now wait for resp
51842 Sep 22 23:22:28.796 INFO [0] received reconcile message
51843 Sep 22 23:22:28.796 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51844 Sep 22 23:22:28.796 INFO [0] client ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51845 Sep 22 23:22:28.796 INFO [0] Sending repair request ReconciliationId(750)
51846 Sep 22 23:22:28.796 INFO [1] received reconcile message
51847 Sep 22 23:22:28.796 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51848 Sep 22 23:22:28.796 INFO [1] client ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51849 Sep 22 23:22:28.796 INFO [1] No action required ReconciliationId(750)
51850 Sep 22 23:22:28.796 INFO [2] received reconcile message
51851 Sep 22 23:22:28.796 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51852 Sep 22 23:22:28.796 INFO [2] client ExtentRepair { repair_id: ReconciliationId(750), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:52864, dest_clients: [ClientId(0)] }
51853 Sep 22 23:22:28.796 INFO [2] No action required ReconciliationId(750)
51854 Sep 22 23:22:28.796 DEBG 750 Repair extent 114 source:[1] 127.0.0.1:52864 dest:[ClientId(0)]
51855 Sep 22 23:22:28.796 INFO Created copy dir "/tmp/downstairs-zrMnlo6G/00/000/072.copy"
51856 Sep 22 23:22:28.861 INFO accepted connection, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51857 Sep 22 23:22:28.861 TRCE incoming request, uri: /extent/114/files, method: GET, req_id: 23b65fae-a1f5-44ab-9b64-bd94423e2b78, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51858 Sep 22 23:22:28.862 INFO request completed, latency_us: 245, response_code: 200, uri: /extent/114/files, method: GET, req_id: 23b65fae-a1f5-44ab-9b64-bd94423e2b78, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51859 Sep 22 23:22:28.862 INFO eid:114 Found repair files: ["072", "072.db"]
51860 Sep 22 23:22:28.862 TRCE incoming request, uri: /newextent/114/data, method: GET, req_id: 7406db28-6503-47d5-893f-a4fcda6dafd5, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51861 Sep 22 23:22:28.863 INFO request completed, latency_us: 334, response_code: 200, uri: /newextent/114/data, method: GET, req_id: 7406db28-6503-47d5-893f-a4fcda6dafd5, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51862 Sep 22 23:22:28.868 TRCE incoming request, uri: /newextent/114/db, method: GET, req_id: 9b1cb398-9752-4fbe-a136-d8b96bece8ff, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51863 Sep 22 23:22:28.868 INFO request completed, latency_us: 403, response_code: 200, uri: /newextent/114/db, method: GET, req_id: 9b1cb398-9752-4fbe-a136-d8b96bece8ff, remote_addr: 127.0.0.1:38532, local_addr: 127.0.0.1:52864, task: repair
51864 Sep 22 23:22:28.869 INFO Repair files downloaded, move directory "/tmp/downstairs-zrMnlo6G/00/000/072.copy" to "/tmp/downstairs-zrMnlo6G/00/000/072.replace"
51865 Sep 22 23:22:28.869 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51866 Sep 22 23:22:28.870 INFO Copy files from "/tmp/downstairs-zrMnlo6G/00/000/072.replace" in "/tmp/downstairs-zrMnlo6G/00/000"
51867 Sep 22 23:22:28.871 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/072"
51868 Sep 22 23:22:28.871 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000/072.db"
51869 Sep 22 23:22:28.871 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51870 Sep 22 23:22:28.871 INFO Move directory "/tmp/downstairs-zrMnlo6G/00/000/072.replace" to "/tmp/downstairs-zrMnlo6G/00/000/072.completed"
51871 Sep 22 23:22:28.871 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51872 Sep 22 23:22:28.871 DEBG fsync completed for: "/tmp/downstairs-zrMnlo6G/00/000"
51873 Sep 22 23:22:28.871 DEBG [0] It's time to notify for 750
51874 Sep 22 23:22:28.871 INFO Completion from [0] id:750 status:true
51875 Sep 22 23:22:28.871 INFO [751/752] Repair commands completed
51876 Sep 22 23:22:28.871 INFO Pop front: ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }, state: ClientData([New, New, New]) }
51877 Sep 22 23:22:28.871 INFO Sent repair work, now wait for resp
51878 Sep 22 23:22:28.871 INFO [0] received reconcile message
51879 Sep 22 23:22:28.871 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }, state: ClientData([InProgress, New, New]) }, : downstairs
51880 Sep 22 23:22:28.871 INFO [0] client ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }
51881 Sep 22 23:22:28.871 INFO [1] received reconcile message
51882 Sep 22 23:22:28.871 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51883 Sep 22 23:22:28.871 INFO [1] client ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }
51884 Sep 22 23:22:28.871 INFO [2] received reconcile message
51885 Sep 22 23:22:28.871 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51886 Sep 22 23:22:28.871 INFO [2] client ExtentReopen { repair_id: ReconciliationId(751), extent_id: 114 }
51887 Sep 22 23:22:28.872 DEBG 751 Reopen extent 114
51888 Sep 22 23:22:28.872 DEBG 751 Reopen extent 114
51889 Sep 22 23:22:28.873 DEBG 751 Reopen extent 114
51890 Sep 22 23:22:28.874 DEBG [2] It's time to notify for 751
51891 Sep 22 23:22:28.874 INFO Completion from [2] id:751 status:true
51892 Sep 22 23:22:28.874 INFO [752/752] Repair commands completed
51893 Sep 22 23:22:28.874 INFO 188 extents repaired in 14.739 ave:0.0784
51894 Sep 22 23:22:28.874 INFO All required repair work is completed
51895 Sep 22 23:22:28.874 INFO Set Downstairs and Upstairs active after repairs
51896 Sep 22 23:22:28.874 INFO 9144e02c-c312-47c4-9b1c-f03618834608 is now active with session: ec8dc28c-8140-4e2d-b538-f84f8abf147b
51897 Sep 22 23:22:28.874 INFO Notify all downstairs, region set compare is done.
51898 Sep 22 23:22:28.874 INFO Set check for repair
51899 Sep 22 23:22:28.874 INFO [1] 127.0.0.1:48339 task reports connection:true
51900 Sep 22 23:22:28.874 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Active Active Active
51901 Sep 22 23:22:28.874 INFO Set check for repair
51902 Sep 22 23:22:28.874 INFO [2] 127.0.0.1:33021 task reports connection:true
51903 Sep 22 23:22:28.874 INFO 9144e02c-c312-47c4-9b1c-f03618834608 Active Active Active
51904 Sep 22 23:22:28.874 INFO Set check for repair
51905 Sep 22 23:22:28.874 INFO [0] received reconcile message
51906 Sep 22 23:22:28.874 INFO [0] All repairs completed, exit
51907 Sep 22 23:22:28.874 INFO [0] Starts cmd_loop
51908 Sep 22 23:22:28.874 INFO [1] received reconcile message
51909 Sep 22 23:22:28.874 INFO [1] All repairs completed, exit
51910 Sep 22 23:22:28.874 INFO [1] Starts cmd_loop
51911 Sep 22 23:22:28.874 INFO [2] received reconcile message
51912 Sep 22 23:22:28.874 INFO [2] All repairs completed, exit
51913 Sep 22 23:22:28.874 INFO [2] Starts cmd_loop
51914 The guest has finished waiting for activation
51915 Sep 22 23:22:28.893 DEBG IO Read 1000 has deps []
51916 Sep 22 23:22:29.069 DEBG [rc] retire 1094 clears [JobId(1093), JobId(1094)], : downstairs
51917 Sep 22 23:22:29.069 DEBG IO Flush 1096 has deps [JobId(1095)]
51918 Sep 22 23:22:29.073 DEBG Flush :1096 extent_limit None deps:[JobId(1095)] res:true f:37 g:1
51919 Sep 22 23:22:29.073 INFO [lossy] sleeping 1 second
51920 Sep 22 23:22:29.451 DEBG [2] Read AckReady 1095, : downstairs
51921 Sep 22 23:22:29.452 DEBG up_ds_listen was notified
51922 Sep 22 23:22:29.452 DEBG up_ds_listen process 1095
51923 Sep 22 23:22:29.452 DEBG [A] ack job 1095:96, : downstairs
51924 Sep 22 23:22:29.505 DEBG up_ds_listen checked 1 jobs, back to waiting
51925 Sep 22 23:22:29.508 INFO [lossy] skipping 1096
51926 Sep 22 23:22:29.508 DEBG Flush :1096 extent_limit None deps:[JobId(1095)] res:true f:37 g:1
51927 Sep 22 23:22:29.508 INFO [lossy] sleeping 1 second
51928 Sep 22 23:22:29.577 DEBG IO Read 1097 has deps [JobId(1096)]
51929 Sep 22 23:22:29.977 DEBG up_ds_listen was notified
51930 Sep 22 23:22:29.977 DEBG up_ds_listen process 1096
51931 Sep 22 23:22:29.977 DEBG [A] ack job 1096:97, : downstairs
51932 Sep 22 23:22:29.977 DEBG up_ds_listen checked 1 jobs, back to waiting
51933 Sep 22 23:22:29.983 DEBG IO Flush 1098 has deps [JobId(1097), JobId(1096)]
51934 Sep 22 23:22:30.011 DEBG Read :1095 deps:[JobId(1094)] res:true
51935 Sep 22 23:22:30.035 DEBG Flush :1096 extent_limit None deps:[JobId(1095)] res:true f:37 g:1
51936 Sep 22 23:22:30.035 INFO [lossy] skipping 1097
51937 Sep 22 23:22:30.052 DEBG Read :1097 deps:[JobId(1096)] res:true
51938 Sep 22 23:22:30.633 DEBG [rc] retire 1096 clears [JobId(1095), JobId(1096)], : downstairs
51939 Sep 22 23:22:30.633 WARN returning error on read!
51940 Sep 22 23:22:30.633 DEBG Read :1097 deps:[JobId(1096)] res:false
51941 Sep 22 23:22:30.634 INFO [lossy] skipping 1097
51942 Sep 22 23:22:30.641 DEBG Read :1097 deps:[JobId(1096)] res:true
51943 Sep 22 23:22:30.663 INFO [lossy] skipping 1097
51944 Sep 22 23:22:30.686 DEBG Read :1097 deps:[JobId(1096)] res:true
51945 Sep 22 23:22:30.708 ERRO [2] job id 1097 saw error GenericError("test error")
51946 Sep 22 23:22:30.710 DEBG Flush :1098 extent_limit None deps:[JobId(1097), JobId(1096)] res:true f:38 g:1
51947 Sep 22 23:22:30.711 INFO [lossy] sleeping 1 second
51948 Sep 22 23:22:31.098 DEBG [1] Read AckReady 1097, : downstairs
51949 Sep 22 23:22:31.099 DEBG up_ds_listen was notified
51950 Sep 22 23:22:31.099 DEBG up_ds_listen process 1097
51951 Sep 22 23:22:31.099 DEBG [A] ack job 1097:98, : downstairs
51952 Sep 22 23:22:31.152 DEBG up_ds_listen checked 1 jobs, back to waiting
51953 Sep 22 23:22:31.155 INFO [lossy] sleeping 1 second
51954 Sep 22 23:22:31.157 WARN returning error on flush!
51955 Sep 22 23:22:31.157 DEBG Flush :1098 extent_limit None deps:[JobId(1097), JobId(1096)] res:false f:38 g:1
51956 Sep 22 23:22:31.157 DEBG Flush :1098 extent_limit None deps:[JobId(1097), JobId(1096)] res:true f:38 g:1
51957 Sep 22 23:22:31.157 INFO [lossy] sleeping 1 second
51958 Sep 22 23:22:31.486 DEBG Read :1000 deps:[] res:true
51959 Sep 22 23:22:31.499 DEBG IO Read 1099 has deps [JobId(1098)]
51960 Sep 22 23:22:32.188 ERRO [2] job id 1098 saw error GenericError("test error")
51961 Sep 22 23:22:32.194 DEBG up_ds_listen was notified
51962 Sep 22 23:22:32.194 DEBG up_ds_listen process 1098
51963 Sep 22 23:22:32.194 DEBG [A] ack job 1098:99, : downstairs
51964 Sep 22 23:22:32.194 DEBG up_ds_listen checked 1 jobs, back to waiting
51965 Sep 22 23:22:32.200 DEBG IO Flush 1100 has deps [JobId(1099), JobId(1098)]
51966 Sep 22 23:22:32.201 INFO [lossy] sleeping 1 second
51967 Sep 22 23:22:32.201 DEBG Flush :1098 extent_limit None deps:[JobId(1097), JobId(1096)] res:true f:38 g:1
51968 Sep 22 23:22:32.201 INFO [lossy] sleeping 1 second
51969 Sep 22 23:22:32.201 INFO [lossy] sleeping 1 second
51970 Sep 22 23:22:32.201 DEBG [rc] retire 1098 clears [JobId(1097), JobId(1098)], : downstairs
51971 Sep 22 23:22:32.941 DEBG Read :1000 deps:[] res:true
51972 Sep 22 23:22:33.202 INFO [lossy] skipping 1099
51973 Sep 22 23:22:33.224 DEBG Read :1099 deps:[JobId(1098)] res:true
51974 Sep 22 23:22:33.246 WARN returning error on read!
51975 Sep 22 23:22:33.246 DEBG Read :1099 deps:[JobId(1098)] res:false
51976 Sep 22 23:22:33.274 DEBG Read :1099 deps:[JobId(1098)] res:true
51977 Sep 22 23:22:33.326 DEBG Read :1099 deps:[JobId(1098)] res:true
51978 Sep 22 23:22:33.348 ERRO [0] job id 1099 saw error GenericError("test error")
51979 Sep 22 23:22:33.351 DEBG Flush :1100 extent_limit None deps:[JobId(1099), JobId(1098)] res:true f:39 g:1
51980 Sep 22 23:22:33.351 INFO [lossy] sleeping 1 second
51981 Sep 22 23:22:33.352 DEBG Flush :1100 extent_limit None deps:[JobId(1099), JobId(1098)] res:true f:39 g:1
51982 Sep 22 23:22:33.352 INFO [lossy] sleeping 1 second
51983 Sep 22 23:22:33.353 DEBG Flush :1100 extent_limit None deps:[JobId(1099), JobId(1098)] res:true f:39 g:1
51984 Sep 22 23:22:33.353 INFO [lossy] sleeping 1 second
51985 Sep 22 23:22:33.849 DEBG [0] Read AckReady 1099, : downstairs
51986 Sep 22 23:22:34.179 DEBG [1] Read already AckReady 1099, : downstairs
51987 Sep 22 23:22:34.352 DEBG Read :1000 deps:[] res:true
51988 Sep 22 23:22:34.509 DEBG [2] Read already AckReady 1099, : downstairs
51989 Sep 22 23:22:34.511 DEBG up_ds_listen was notified
51990 Sep 22 23:22:34.511 DEBG up_ds_listen process 1099
51991 Sep 22 23:22:34.511 DEBG [A] ack job 1099:100, : downstairs
51992 Sep 22 23:22:34.564 DEBG up_ds_listen process 1100
51993 Sep 22 23:22:34.564 DEBG [A] ack job 1100:101, : downstairs
51994 Sep 22 23:22:34.564 DEBG [rc] retire 1100 clears [JobId(1099), JobId(1100)], : downstairs
51995 Sep 22 23:22:34.564 DEBG up_ds_listen checked 2 jobs, back to waiting
51996 Sep 22 23:22:34.564 DEBG up_ds_listen was notified
51997 Sep 22 23:22:34.564 DEBG up_ds_listen checked 0 jobs, back to waiting
51998 Sep 22 23:22:34.566 INFO [lossy] sleeping 1 second
51999 Sep 22 23:22:34.566 INFO [lossy] sleeping 1 second
52000 Sep 22 23:22:34.566 INFO [lossy] sleeping 1 second
52001 Sep 22 23:22:34.566 DEBG IO Read 1101 has deps []
52002 Sep 22 23:22:35.066 DEBG IO Flush 1102 has deps [JobId(1101)]
52003 Sep 22 23:22:35.572 DEBG Read :1101 deps:[] res:true
52004 Sep 22 23:22:35.574 INFO Checking if live repair is needed
52005 Sep 22 23:22:35.574 INFO No Live Repair required at this time
52006 Sep 22 23:22:35.574 DEBG IO Flush 1001 has deps [JobId(1000)]
52007 Sep 22 23:22:35.600 DEBG Read :1101 deps:[] res:true
52008 Sep 22 23:22:35.628 DEBG Read :1101 deps:[] res:true
52009 Sep 22 23:22:35.653 DEBG Flush :1102 extent_limit None deps:[JobId(1101)] res:true f:40 g:1
52010 Sep 22 23:22:35.653 INFO [lossy] sleeping 1 second
52011 Sep 22 23:22:35.654 DEBG Flush :1102 extent_limit None deps:[JobId(1101)] res:true f:40 g:1
52012 Sep 22 23:22:35.654 INFO [lossy] sleeping 1 second
52013 Sep 22 23:22:35.655 INFO [lossy] skipping 1102
52014 Sep 22 23:22:35.655 DEBG Flush :1102 extent_limit None deps:[JobId(1101)] res:true f:40 g:1
52015 Sep 22 23:22:35.655 INFO [lossy] sleeping 1 second
52016 Sep 22 23:22:35.749 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
52017 Sep 22 23:22:35.844 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
52018 Sep 22 23:22:35.949 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
52019 Sep 22 23:22:36.127 DEBG [0] Read AckReady 1101, : downstairs
52020 Sep 22 23:22:36.457 DEBG [1] Read already AckReady 1101, : downstairs
52021 Sep 22 23:22:36.789 DEBG [2] Read already AckReady 1101, : downstairs
52022 Sep 22 23:22:36.791 DEBG up_ds_listen was notified
52023 Sep 22 23:22:36.791 DEBG up_ds_listen process 1101
52024 Sep 22 23:22:36.792 DEBG [A] ack job 1101:102, : downstairs
52025 Sep 22 23:22:36.844 DEBG up_ds_listen process 1102
52026 Sep 22 23:22:36.844 DEBG [A] ack job 1102:103, : downstairs
52027 Sep 22 23:22:36.844 DEBG [rc] retire 1102 clears [JobId(1101), JobId(1102)], : downstairs
52028 Sep 22 23:22:36.844 DEBG up_ds_listen checked 2 jobs, back to waiting
52029 Sep 22 23:22:36.844 DEBG up_ds_listen was notified
52030 Sep 22 23:22:36.844 DEBG up_ds_listen checked 0 jobs, back to waiting
52031 Sep 22 23:22:36.846 INFO [lossy] sleeping 1 second
52032 Sep 22 23:22:36.846 INFO [lossy] sleeping 1 second
52033 Sep 22 23:22:36.846 INFO [lossy] sleeping 1 second
52034 Sep 22 23:22:36.846 DEBG IO Read 1103 has deps []
52035 Sep 22 23:22:37.347 DEBG IO Flush 1104 has deps [JobId(1103)]
52036 test test::integration_test_problematic_downstairs has been running for over 60 seconds
52037 Sep 22 23:22:37.847 INFO [lossy] skipping 1103
52038 Sep 22 23:22:37.847 INFO [lossy] skipping 1104
52039 Sep 22 23:22:37.847 INFO [lossy] skipping 1103
52040 Sep 22 23:22:37.853 DEBG Read :1103 deps:[] res:true
52041 Sep 22 23:22:37.880 DEBG Read :1103 deps:[] res:true
52042 Sep 22 23:22:37.902 INFO [lossy] skipping 1103
52043 Sep 22 23:22:37.908 DEBG Read :1103 deps:[] res:true
52044 Sep 22 23:22:37.932 INFO [lossy] skipping 1104
52045 Sep 22 23:22:37.932 INFO [lossy] skipping 1104
52046 Sep 22 23:22:37.932 DEBG Flush :1104 extent_limit None deps:[JobId(1103)] res:true f:41 g:1
52047 Sep 22 23:22:37.933 INFO [lossy] skipping 1104
52048 Sep 22 23:22:37.933 DEBG Flush :1104 extent_limit None deps:[JobId(1103)] res:true f:41 g:1
52049 Sep 22 23:22:37.933 INFO [lossy] sleeping 1 second
52050 Sep 22 23:22:37.934 WARN returning error on flush!
52051 Sep 22 23:22:37.934 DEBG Flush :1104 extent_limit None deps:[JobId(1103)] res:false f:41 g:1
52052 Sep 22 23:22:37.935 INFO [lossy] skipping 1104
52053 Sep 22 23:22:37.935 DEBG Flush :1104 extent_limit None deps:[JobId(1103)] res:true f:41 g:1
52054 Sep 22 23:22:37.935 INFO [lossy] sleeping 1 second
52055 test test::integration_test_volume_replace_downstairs_then_takeover has been running for over 60 seconds
52056 Sep 22 23:22:38.408 DEBG [0] Read AckReady 1103, : downstairs
52057 Sep 22 23:22:38.738 DEBG [1] Read already AckReady 1103, : downstairs
52058 Sep 22 23:22:39.068 DEBG [2] Read already AckReady 1103, : downstairs
52059 Sep 22 23:22:39.074 ERRO [2] job id 1104 saw error GenericError("test error")
52060 Sep 22 23:22:39.074 DEBG up_ds_listen was notified
52061 Sep 22 23:22:39.075 DEBG up_ds_listen process 1103
52062 Sep 22 23:22:39.075 DEBG [A] ack job 1103:104, : downstairs
52063 Sep 22 23:22:39.131 DEBG up_ds_listen process 1104
52064 Sep 22 23:22:39.131 DEBG [A] ack job 1104:105, : downstairs
52065 Sep 22 23:22:39.131 DEBG [rc] retire 1104 clears [JobId(1103), JobId(1104)], : downstairs
52066 Sep 22 23:22:39.131 DEBG up_ds_listen checked 2 jobs, back to waiting
52067 Sep 22 23:22:39.131 DEBG up_ds_listen was notified
52068 Sep 22 23:22:39.131 DEBG up_ds_listen checked 0 jobs, back to waiting
52069 Sep 22 23:22:39.132 INFO [lossy] sleeping 1 second
52070 Sep 22 23:22:39.132 INFO [lossy] sleeping 1 second
52071 Sep 22 23:22:39.133 DEBG IO Read 1105 has deps []
52072 Sep 22 23:22:39.152 DEBG Read :1105 deps:[] res:true
52073 Sep 22 23:22:39.557 DEBG [0] Read AckReady 1105, : downstairs
52074 Sep 22 23:22:39.558 DEBG up_ds_listen was notified
52075 Sep 22 23:22:39.558 DEBG up_ds_listen process 1105
52076 Sep 22 23:22:39.558 DEBG [A] ack job 1105:106, : downstairs
52077 Sep 22 23:22:39.611 DEBG up_ds_listen checked 1 jobs, back to waiting
52078 Sep 22 23:22:39.612 DEBG IO Read 1106 has deps []
52079 Sep 22 23:22:39.631 DEBG Read :1106 deps:[] res:true
52080 Sep 22 23:22:39.653 DEBG IO Flush 1107 has deps [JobId(1106), JobId(1105)]
52081 Sep 22 23:22:39.655 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:true f:42 g:1
52082 Sep 22 23:22:40.035 DEBG [0] Read AckReady 1106, : downstairs
52083 Sep 22 23:22:40.036 DEBG up_ds_listen was notified
52084 Sep 22 23:22:40.036 DEBG up_ds_listen process 1106
52085 Sep 22 23:22:40.036 DEBG [A] ack job 1106:107, : downstairs
52086 Sep 22 23:22:40.089 DEBG up_ds_listen checked 1 jobs, back to waiting
52087 Sep 22 23:22:40.090 DEBG IO Read 1108 has deps [JobId(1107)]
52088 Sep 22 23:22:40.103 WARN returning error on read!
52089 Sep 22 23:22:40.103 DEBG Read :1108 deps:[JobId(1107)] res:false
52090 Sep 22 23:22:40.103 INFO [lossy] skipping 1108
52091 Sep 22 23:22:40.103 WARN returning error on read!
52092 Sep 22 23:22:40.103 DEBG Read :1108 deps:[JobId(1107)] res:false
52093 Sep 22 23:22:40.103 INFO [lossy] skipping 1108
52094 Sep 22 23:22:40.125 DEBG Read :1108 deps:[JobId(1107)] res:true
52095 Sep 22 23:22:40.183 DEBG Read :1105 deps:[] res:true
52096 Sep 22 23:22:40.240 DEBG Read :1105 deps:[] res:true
52097 Sep 22 23:22:40.262 ERRO [0] job id 1108 saw error GenericError("test error")
52098 Sep 22 23:22:40.262 ERRO [0] job id 1108 saw error GenericError("test error")
52099 Sep 22 23:22:40.262 DEBG IO Flush 1109 has deps [JobId(1108), JobId(1107)]
52100 Sep 22 23:22:40.266 WARN returning error on flush!
52101 Sep 22 23:22:40.266 DEBG Flush :1109 extent_limit None deps:[JobId(1108), JobId(1107)] res:false f:43 g:1
52102 Sep 22 23:22:40.266 INFO [lossy] skipping 1109
52103 Sep 22 23:22:40.266 WARN returning error on flush!
52104 Sep 22 23:22:40.266 DEBG Flush :1109 extent_limit None deps:[JobId(1108), JobId(1107)] res:false f:43 g:1
52105 Sep 22 23:22:40.266 DEBG Flush :1109 extent_limit None deps:[JobId(1108), JobId(1107)] res:true f:43 g:1
52106 Sep 22 23:22:40.345 DEBG Read :1106 deps:[] res:true
52107 Sep 22 23:22:40.375 DEBG Read :1106 deps:[] res:true
52108 Sep 22 23:22:40.745 DEBG [0] Read AckReady 1108, : downstairs
52109 Sep 22 23:22:40.746 ERRO [0] job id 1109 saw error GenericError("test error")
52110 Sep 22 23:22:40.746 ERRO [0] job id 1109 saw error GenericError("test error")
52111 Sep 22 23:22:40.746 DEBG up_ds_listen was notified
52112 Sep 22 23:22:40.746 DEBG up_ds_listen process 1108
52113 Sep 22 23:22:40.746 DEBG [A] ack job 1108:109, : downstairs
52114 Sep 22 23:22:40.799 DEBG up_ds_listen checked 1 jobs, back to waiting
52115 Sep 22 23:22:41.583 DEBG IO Read 1110 has deps [JobId(1109)]
52116 Sep 22 23:22:41.583 DEBG IO Flush 1111 has deps [JobId(1110), JobId(1109)]
52117 Sep 22 23:22:41.595 INFO [lossy] sleeping 1 second
52118 Sep 22 23:22:41.598 WARN returning error on flush!
52119 Sep 22 23:22:41.598 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:false f:42 g:1
52120 Sep 22 23:22:41.598 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:true f:42 g:1
52121 Sep 22 23:22:41.598 INFO [lossy] skipping 1108
52122 Sep 22 23:22:41.598 INFO [lossy] skipping 1110
52123 Sep 22 23:22:41.598 INFO [lossy] skipping 1111
52124 Sep 22 23:22:41.604 DEBG Read :1108 deps:[JobId(1107)] res:true
52125 Sep 22 23:22:41.626 WARN returning error on flush!
52126 Sep 22 23:22:41.626 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:false f:42 g:1
52127 Sep 22 23:22:41.626 INFO [lossy] skipping 1107
52128 Sep 22 23:22:41.626 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:true f:42 g:1
52129 Sep 22 23:22:41.626 INFO [lossy] skipping 1108
52130 Sep 22 23:22:41.632 DEBG Read :1108 deps:[JobId(1107)] res:true
52131 Sep 22 23:22:42.115 ERRO [1] job id 1107 saw error GenericError("test error")
52132 Sep 22 23:22:42.445 ERRO [2] job id 1107 saw error GenericError("test error")
52133 Sep 22 23:22:42.446 DEBG up_ds_listen was notified
52134 Sep 22 23:22:42.446 DEBG up_ds_listen process 1107
52135 Sep 22 23:22:42.446 DEBG [A] ack job 1107:108, : downstairs
52136 Sep 22 23:22:42.446 DEBG [rc] retire 1107 clears [JobId(1105), JobId(1106), JobId(1107)], : downstairs
52137 Sep 22 23:22:42.446 DEBG up_ds_listen checked 1 jobs, back to waiting
52138 Sep 22 23:22:42.449 INFO [lossy] skipping 1109
52139 Sep 22 23:22:42.449 INFO [lossy] skipping 1110
52140 Sep 22 23:22:42.449 DEBG Flush :1109 extent_limit None deps:[JobId(1108), JobId(1107)] res:true f:43 g:1
52141 Sep 22 23:22:42.449 INFO [lossy] skipping 1110
52142 Sep 22 23:22:42.455 DEBG Read :1110 deps:[JobId(1109)] res:true
52143 Sep 22 23:22:42.477 DEBG Flush :1109 extent_limit None deps:[JobId(1108), JobId(1107)] res:true f:43 g:1
52144 Sep 22 23:22:42.482 DEBG Read :1110 deps:[JobId(1109)] res:true
52145 Sep 22 23:22:43.269 DEBG up_ds_listen was notified
52146 Sep 22 23:22:43.269 DEBG up_ds_listen process 1109
52147 Sep 22 23:22:43.269 DEBG [A] ack job 1109:110, : downstairs
52148 Sep 22 23:22:43.269 DEBG [rc] retire 1109 clears [JobId(1108), JobId(1109)], : downstairs
52149 Sep 22 23:22:43.269 DEBG up_ds_listen checked 1 jobs, back to waiting
52150 Sep 22 23:22:43.290 DEBG Read :1110 deps:[JobId(1109)] res:true
52151 Sep 22 23:22:43.314 INFO [lossy] sleeping 1 second
52152 Sep 22 23:22:43.316 DEBG Flush :1111 extent_limit None deps:[JobId(1110), JobId(1109)] res:true f:44 g:1
52153 Sep 22 23:22:43.316 INFO [lossy] sleeping 1 second
52154 Sep 22 23:22:43.989 DEBG [1] Read AckReady 1110, : downstairs
52155 Sep 22 23:22:44.330 DEBG [2] Read already AckReady 1110, : downstairs
52156 Sep 22 23:22:44.332 DEBG up_ds_listen was notified
52157 Sep 22 23:22:44.332 DEBG up_ds_listen process 1110
52158 Sep 22 23:22:44.332 DEBG [A] ack job 1110:111, : downstairs
52159 Sep 22 23:22:44.385 DEBG up_ds_listen checked 1 jobs, back to waiting
52160 Sep 22 23:22:44.387 DEBG Flush :1111 extent_limit None deps:[JobId(1110), JobId(1109)] res:true f:44 g:1
52161 Sep 22 23:22:44.388 INFO [lossy] sleeping 1 second
52162 Sep 22 23:22:44.388 DEBG Flush :1111 extent_limit None deps:[JobId(1110), JobId(1109)] res:true f:44 g:1
52163 Sep 22 23:22:44.388 INFO [lossy] sleeping 1 second
52164 Sep 22 23:22:44.388 INFO [lossy] sleeping 1 second
52165 Sep 22 23:22:44.435 DEBG IO Read 1112 has deps [JobId(1111)]
52166 Sep 22 23:22:44.771 DEBG up_ds_listen was notified
52167 Sep 22 23:22:44.771 DEBG up_ds_listen process 1111
52168 Sep 22 23:22:44.771 DEBG [A] ack job 1111:112, : downstairs
52169 Sep 22 23:22:44.771 DEBG [rc] retire 1111 clears [JobId(1110), JobId(1111)], : downstairs
52170 Sep 22 23:22:44.771 DEBG up_ds_listen checked 1 jobs, back to waiting
52171 Sep 22 23:22:44.889 DEBG IO Flush 1113 has deps [JobId(1112)]
52172 Sep 22 23:22:45.395 DEBG Read :1112 deps:[JobId(1111)] res:true
52173 Sep 22 23:22:45.422 DEBG Read :1112 deps:[JobId(1111)] res:true
52174 Sep 22 23:22:45.444 INFO [lossy] skipping 1112
52175 Sep 22 23:22:45.450 DEBG Read :1112 deps:[JobId(1111)] res:true
52176 Sep 22 23:22:45.474 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:true f:45 g:1
52177 Sep 22 23:22:45.474 INFO [lossy] sleeping 1 second
52178 Sep 22 23:22:45.522 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:true f:45 g:1
52179 Sep 22 23:22:45.522 INFO [lossy] sleeping 1 second
52180 Sep 22 23:22:45.523 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:true f:45 g:1
52181 Sep 22 23:22:45.523 INFO [lossy] sleeping 1 second
52182 Sep 22 23:22:45.851 DEBG [0] Read AckReady 1112, : downstairs
52183 Sep 22 23:22:45.852 DEBG up_ds_listen was notified
52184 Sep 22 23:22:45.852 DEBG up_ds_listen process 1112
52185 Sep 22 23:22:45.852 DEBG [A] ack job 1112:113, : downstairs
52186 Sep 22 23:22:45.905 DEBG up_ds_listen checked 1 jobs, back to waiting
52187 Sep 22 23:22:46.662 DEBG up_ds_listen was notified
52188 Sep 22 23:22:46.662 DEBG up_ds_listen process 1113
52189 Sep 22 23:22:46.662 DEBG [A] ack job 1113:114, : downstairs
52190 Sep 22 23:22:46.663 DEBG [rc] retire 1113 clears [JobId(1112), JobId(1113)], : downstairs
52191 Sep 22 23:22:46.663 DEBG up_ds_listen checked 1 jobs, back to waiting
52192 Sep 22 23:22:46.663 DEBG IO Read 1114 has deps []
52193 Sep 22 23:22:46.663 DEBG IO Flush 1115 has deps [JobId(1114)]
52194 Sep 22 23:22:46.663 INFO [lossy] sleeping 1 second
52195 Sep 22 23:22:46.675 INFO [lossy] sleeping 1 second
52196 Sep 22 23:22:46.681 DEBG Read :1114 deps:[] res:true
52197 Sep 22 23:22:46.704 DEBG Flush :1115 extent_limit None deps:[JobId(1114)] res:true f:46 g:1
52198 Sep 22 23:22:47.080 DEBG [1] Read AckReady 1114, : downstairs
52199 Sep 22 23:22:47.081 DEBG up_ds_listen was notified
52200 Sep 22 23:22:47.081 DEBG up_ds_listen process 1114
52201 Sep 22 23:22:47.081 DEBG [A] ack job 1114:115, : downstairs
52202 Sep 22 23:22:47.134 DEBG up_ds_listen checked 1 jobs, back to waiting
52203 Sep 22 23:22:47.135 DEBG IO Read 1116 has deps [JobId(1115)]
52204 Sep 22 23:22:47.153 DEBG Read :1116 deps:[JobId(1115)] res:true
52205 Sep 22 23:22:47.174 DEBG IO Flush 1117 has deps [JobId(1116), JobId(1115)]
52206 Sep 22 23:22:47.176 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:true f:47 g:1
52207 Sep 22 23:22:47.552 DEBG [1] Read AckReady 1116, : downstairs
52208 Sep 22 23:22:47.553 DEBG up_ds_listen was notified
52209 Sep 22 23:22:47.553 DEBG up_ds_listen process 1116
52210 Sep 22 23:22:47.553 DEBG [A] ack job 1116:117, : downstairs
52211 Sep 22 23:22:47.606 DEBG up_ds_listen checked 1 jobs, back to waiting
52212 Sep 22 23:22:47.607 DEBG IO Read 1118 has deps [JobId(1117)]
52213 Sep 22 23:22:47.620 WARN returning error on read!
52214 Sep 22 23:22:47.620 DEBG Read :1118 deps:[JobId(1117)] res:false
52215 Sep 22 23:22:47.620 INFO [lossy] skipping 1118
52216 Sep 22 23:22:47.620 WARN returning error on read!
52217 Sep 22 23:22:47.620 DEBG Read :1118 deps:[JobId(1117)] res:false
52218 Sep 22 23:22:47.620 INFO [lossy] skipping 1118
52219 Sep 22 23:22:47.626 DEBG Read :1118 deps:[JobId(1117)] res:true
52220 Sep 22 23:22:47.647 ERRO [1] job id 1118 saw error GenericError("test error")
52221 Sep 22 23:22:47.647 ERRO [1] job id 1118 saw error GenericError("test error")
52222 Sep 22 23:22:48.024 DEBG [1] Read AckReady 1118, : downstairs
52223 Sep 22 23:22:48.025 DEBG up_ds_listen was notified
52224 Sep 22 23:22:48.025 DEBG up_ds_listen process 1118
52225 Sep 22 23:22:48.026 DEBG [A] ack job 1118:119, : downstairs
52226 Sep 22 23:22:48.078 DEBG up_ds_listen checked 1 jobs, back to waiting
52227 Sep 22 23:22:48.079 WARN returning error on read!
52228 Sep 22 23:22:48.079 DEBG Read :1114 deps:[] res:false
52229 Sep 22 23:22:48.080 INFO [lossy] skipping 1115
52230 Sep 22 23:22:48.085 DEBG Read :1114 deps:[] res:true
52231 Sep 22 23:22:48.106 DEBG IO Flush 1119 has deps [JobId(1118), JobId(1117)]
52232 Sep 22 23:22:48.107 INFO [lossy] skipping 1114
52233 Sep 22 23:22:48.113 DEBG Read :1114 deps:[] res:true
52234 Sep 22 23:22:48.135 DEBG IO Read 1120 has deps [JobId(1119)]
52235 Sep 22 23:22:48.135 ERRO [2] job id 1114 saw error GenericError("test error")
52236 Sep 22 23:22:48.140 INFO [lossy] skipping 1119
52237 Sep 22 23:22:48.140 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:true f:48 g:1
52238 Sep 22 23:22:48.147 INFO [lossy] sleeping 1 second
52239 Sep 22 23:22:48.149 INFO [lossy] skipping 1115
52240 Sep 22 23:22:48.149 INFO [lossy] skipping 1119
52241 Sep 22 23:22:48.149 DEBG Flush :1115 extent_limit None deps:[JobId(1114)] res:true f:46 g:1
52242 Sep 22 23:22:48.149 INFO [lossy] skipping 1116
52243 Sep 22 23:22:48.149 WARN 1117 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
52244 Sep 22 23:22:48.155 DEBG Read :1116 deps:[JobId(1115)] res:true
52245 Sep 22 23:22:48.177 INFO [lossy] skipping 1115
52246 Sep 22 23:22:48.177 DEBG Flush :1115 extent_limit None deps:[JobId(1114)] res:true f:46 g:1
52247 Sep 22 23:22:48.182 DEBG Read :1116 deps:[JobId(1115)] res:true
52248 Sep 22 23:22:48.968 DEBG up_ds_listen was notified
52249 Sep 22 23:22:48.968 DEBG up_ds_listen process 1115
52250 Sep 22 23:22:48.968 DEBG [A] ack job 1115:116, : downstairs
52251 Sep 22 23:22:48.968 DEBG [rc] retire 1115 clears [JobId(1114), JobId(1115)], : downstairs
52252 Sep 22 23:22:48.968 DEBG up_ds_listen checked 1 jobs, back to waiting
52253 Sep 22 23:22:48.968 DEBG IO Flush 1121 has deps [JobId(1120), JobId(1119)]
52254 Sep 22 23:22:48.971 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:true f:47 g:1
52255 Sep 22 23:22:48.971 INFO [lossy] skipping 1118
52256 Sep 22 23:22:48.971 INFO [lossy] skipping 1119
52257 Sep 22 23:22:48.971 INFO [lossy] skipping 1120
52258 Sep 22 23:22:48.977 DEBG Read :1118 deps:[JobId(1117)] res:true
52259 Sep 22 23:22:48.999 WARN returning error on flush!
52260 Sep 22 23:22:48.999 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:false f:47 g:1
52261 Sep 22 23:22:49.000 INFO [lossy] skipping 1117
52262 Sep 22 23:22:49.000 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:true f:47 g:1
52263 Sep 22 23:22:49.000 WARN returning error on read!
52264 Sep 22 23:22:49.000 DEBG Read :1118 deps:[JobId(1117)] res:false
52265 Sep 22 23:22:49.000 WARN 1119 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
52266 Sep 22 23:22:49.006 DEBG Read :1118 deps:[JobId(1117)] res:true
52267 Sep 22 23:22:49.784 ERRO [2] job id 1117 saw error GenericError("test error")
52268 Sep 22 23:22:49.784 ERRO [2] job id 1118 saw error GenericError("test error")
52269 Sep 22 23:22:49.784 DEBG up_ds_listen was notified
52270 Sep 22 23:22:49.784 DEBG up_ds_listen process 1117
52271 Sep 22 23:22:49.784 DEBG [A] ack job 1117:118, : downstairs
52272 Sep 22 23:22:49.784 DEBG [rc] retire 1117 clears [JobId(1116), JobId(1117)], : downstairs
52273 Sep 22 23:22:49.784 DEBG up_ds_listen checked 1 jobs, back to waiting
52274 Sep 22 23:22:49.790 DEBG Read :1120 deps:[JobId(1119)] res:true
52275 Sep 22 23:22:49.814 INFO [lossy] skipping 1119
52276 Sep 22 23:22:49.814 INFO [lossy] skipping 1120
52277 Sep 22 23:22:49.814 WARN returning error on flush!
52278 Sep 22 23:22:49.815 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:false f:48 g:1
52279 Sep 22 23:22:49.815 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:true f:48 g:1
52280 Sep 22 23:22:49.815 WARN returning error on read!
52281 Sep 22 23:22:49.815 DEBG Read :1120 deps:[JobId(1119)] res:false
52282 Sep 22 23:22:49.815 WARN 1121 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
52283 Sep 22 23:22:49.815 WARN returning error on read!
52284 Sep 22 23:22:49.815 DEBG Read :1120 deps:[JobId(1119)] res:false
52285 Sep 22 23:22:49.821 DEBG Read :1120 deps:[JobId(1119)] res:true
52286 Sep 22 23:22:49.842 WARN returning error on flush!
52287 Sep 22 23:22:49.842 DEBG Flush :1121 extent_limit None deps:[JobId(1120), JobId(1119)] res:false f:49 g:1
52288 Sep 22 23:22:49.842 INFO [lossy] skipping 1121
52289 Sep 22 23:22:49.842 INFO [lossy] skipping 1121
52290 Sep 22 23:22:49.842 DEBG Flush :1121 extent_limit None deps:[JobId(1120), JobId(1119)] res:true f:49 g:1
52291 Sep 22 23:22:49.844 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:true f:48 g:1
52292 Sep 22 23:22:49.850 DEBG Read :1120 deps:[JobId(1119)] res:true
52293 Sep 22 23:22:50.351 ERRO [0] job id 1119 saw error GenericError("test error")
52294 Sep 22 23:22:50.352 ERRO [0] job id 1120 saw error GenericError("test error")
52295 Sep 22 23:22:50.352 ERRO [0] job id 1120 saw error GenericError("test error")
52296 Sep 22 23:22:50.680 DEBG [1] Read AckReady 1120, : downstairs
52297 Sep 22 23:22:50.681 ERRO [1] job id 1121 saw error GenericError("test error")
52298 Sep 22 23:22:51.012 DEBG up_ds_listen was notified
52299 Sep 22 23:22:51.012 DEBG up_ds_listen process 1119
52300 Sep 22 23:22:51.012 DEBG [A] ack job 1119:120, : downstairs
52301 Sep 22 23:22:51.012 DEBG [rc] retire 1119 clears [JobId(1118), JobId(1119)], : downstairs
52302 Sep 22 23:22:51.012 DEBG up_ds_listen process 1120
52303 Sep 22 23:22:51.012 DEBG [A] ack job 1120:121, : downstairs
52304 Sep 22 23:22:51.065 DEBG up_ds_listen checked 2 jobs, back to waiting
52305 Sep 22 23:22:51.065 DEBG up_ds_listen was notified
52306 Sep 22 23:22:51.065 DEBG up_ds_listen checked 0 jobs, back to waiting
52307 Sep 22 23:22:51.066 DEBG IO Read 1122 has deps [JobId(1121)]
52308 Sep 22 23:22:51.079 INFO [lossy] skipping 1122
52309 Sep 22 23:22:51.079 WARN returning error on read!
52310 Sep 22 23:22:51.079 DEBG Read :1122 deps:[JobId(1121)] res:false
52311 Sep 22 23:22:51.085 DEBG Read :1122 deps:[JobId(1121)] res:true
52312 Sep 22 23:22:51.106 ERRO [1] job id 1122 saw error GenericError("test error")
52313 Sep 22 23:22:51.108 WARN returning error on flush!
52314 Sep 22 23:22:51.108 DEBG Flush :1121 extent_limit None deps:[JobId(1120), JobId(1119)] res:false f:49 g:1
52315 Sep 22 23:22:51.109 DEBG Flush :1121 extent_limit None deps:[JobId(1120), JobId(1119)] res:true f:49 g:1
52316 Sep 22 23:22:51.114 DEBG Read :1122 deps:[JobId(1121)] res:true
52317 Sep 22 23:22:51.136 WARN returning error on flush!
52318 Sep 22 23:22:51.136 DEBG Flush :1121 extent_limit None deps:[JobId(1120), JobId(1119)] res:false f:49 g:1
52319 Sep 22 23:22:51.136 DEBG Flush :1121 extent_limit None deps:[JobId(1120), JobId(1119)] res:true f:49 g:1
52320 Sep 22 23:22:51.142 DEBG Read :1122 deps:[JobId(1121)] res:true
52321 Sep 22 23:22:51.588 ERRO [0] job id 1121 saw error GenericError("test error")
52322 Sep 22 23:22:51.918 ERRO [2] job id 1121 saw error GenericError("test error")
52323 Sep 22 23:22:51.918 DEBG up_ds_listen was notified
52324 Sep 22 23:22:51.918 DEBG up_ds_listen process 1121
52325 Sep 22 23:22:51.918 DEBG [A] ack job 1121:122, : downstairs
52326 Sep 22 23:22:51.918 DEBG [rc] retire 1121 clears [JobId(1120), JobId(1121)], : downstairs
52327 Sep 22 23:22:51.918 DEBG up_ds_listen checked 1 jobs, back to waiting
52328 Sep 22 23:22:51.918 DEBG IO Flush 1123 has deps [JobId(1122)]
52329 Sep 22 23:22:51.920 DEBG Flush :1123 extent_limit None deps:[JobId(1122)] res:true f:50 g:1
52330 Sep 22 23:22:52.296 DEBG [1] Read AckReady 1122, : downstairs
52331 Sep 22 23:22:52.297 DEBG up_ds_listen was notified
52332 Sep 22 23:22:52.297 DEBG up_ds_listen process 1122
52333 Sep 22 23:22:52.297 DEBG [A] ack job 1122:123, : downstairs
52334 Sep 22 23:22:52.350 DEBG up_ds_listen checked 1 jobs, back to waiting
52335 Sep 22 23:22:52.352 DEBG IO Read 1124 has deps [JobId(1123)]
52336 Sep 22 23:22:52.370 DEBG Read :1124 deps:[JobId(1123)] res:true
52337 Sep 22 23:22:52.393 DEBG Flush :1123 extent_limit None deps:[JobId(1122)] res:true f:50 g:1
52338 Sep 22 23:22:52.398 DEBG Read :1124 deps:[JobId(1123)] res:true
52339 Sep 22 23:22:52.420 DEBG Flush :1123 extent_limit None deps:[JobId(1122)] res:true f:50 g:1
52340 Sep 22 23:22:52.426 DEBG Read :1124 deps:[JobId(1123)] res:true
52341 Sep 22 23:22:52.543 DEBG IO Flush 1125 has deps [JobId(1124), JobId(1123)]
52342 Sep 22 23:22:53.204 DEBG up_ds_listen was notified
52343 Sep 22 23:22:53.204 DEBG up_ds_listen process 1123
52344 Sep 22 23:22:53.204 DEBG [A] ack job 1123:124, : downstairs
52345 Sep 22 23:22:53.204 DEBG [rc] retire 1123 clears [JobId(1122), JobId(1123)], : downstairs
52346 Sep 22 23:22:53.204 DEBG up_ds_listen checked 1 jobs, back to waiting
52347 Sep 22 23:22:53.207 WARN returning error on flush!
52348 Sep 22 23:22:53.207 DEBG Flush :1125 extent_limit None deps:[JobId(1124), JobId(1123)] res:false f:51 g:1
52349 Sep 22 23:22:53.207 DEBG Flush :1125 extent_limit None deps:[JobId(1124), JobId(1123)] res:true f:51 g:1
52350 Sep 22 23:22:53.584 DEBG [1] Read AckReady 1124, : downstairs
52351 Sep 22 23:22:53.584 ERRO [1] job id 1125 saw error GenericError("test error")
52352 Sep 22 23:22:53.585 DEBG up_ds_listen was notified
52353 Sep 22 23:22:53.585 DEBG up_ds_listen process 1124
52354 Sep 22 23:22:53.585 DEBG [A] ack job 1124:125, : downstairs
52355 Sep 22 23:22:53.637 DEBG up_ds_listen checked 1 jobs, back to waiting
52356 Sep 22 23:22:53.639 DEBG IO Read 1126 has deps [JobId(1125)]
52357 Sep 22 23:22:53.657 DEBG Read :1126 deps:[JobId(1125)] res:true
52358 Sep 22 23:22:53.680 INFO [lossy] sleeping 1 second
52359 Sep 22 23:22:53.681 DEBG Flush :1125 extent_limit None deps:[JobId(1124), JobId(1123)] res:true f:51 g:1
52360 Sep 22 23:22:53.681 INFO [lossy] skipping 1126
52361 Sep 22 23:22:53.687 DEBG Read :1126 deps:[JobId(1125)] res:true
52362 Sep 22 23:22:53.804 DEBG IO Flush 1127 has deps [JobId(1126), JobId(1125)]
52363 Sep 22 23:22:54.466 DEBG up_ds_listen was notified
52364 Sep 22 23:22:54.466 DEBG up_ds_listen process 1125
52365 Sep 22 23:22:54.466 DEBG [A] ack job 1125:126, : downstairs
52366 Sep 22 23:22:54.466 DEBG up_ds_listen checked 1 jobs, back to waiting
52367 Sep 22 23:22:54.468 DEBG Flush :1127 extent_limit None deps:[JobId(1126), JobId(1125)] res:true f:52 g:1
52368 Sep 22 23:22:54.845 DEBG [1] Read AckReady 1126, : downstairs
52369 Sep 22 23:22:54.846 DEBG up_ds_listen was notified
52370 Sep 22 23:22:54.846 DEBG up_ds_listen process 1126
52371 Sep 22 23:22:54.846 DEBG [A] ack job 1126:127, : downstairs
52372 Sep 22 23:22:54.899 DEBG up_ds_listen checked 1 jobs, back to waiting
52373 Sep 22 23:22:54.900 WARN returning error on flush!
52374 Sep 22 23:22:54.900 DEBG Flush :1125 extent_limit None deps:[JobId(1124), JobId(1123)] res:false f:51 g:1
52375 Sep 22 23:22:54.900 INFO [lossy] skipping 1125
52376 Sep 22 23:22:54.900 DEBG Flush :1125 extent_limit None deps:[JobId(1124), JobId(1123)] res:true f:51 g:1
52377 Sep 22 23:22:54.900 INFO [lossy] sleeping 1 second
52378 Sep 22 23:22:54.900 DEBG IO Read 1128 has deps [JobId(1127)]
52379 Sep 22 23:22:54.901 ERRO [0] job id 1125 saw error GenericError("test error")
52380 Sep 22 23:22:54.901 DEBG [rc] retire 1125 clears [JobId(1124), JobId(1125)], : downstairs
52381 Sep 22 23:22:54.913 INFO [lossy] sleeping 1 second
52382 Sep 22 23:22:54.914 WARN returning error on flush!
52383 Sep 22 23:22:54.915 DEBG Flush :1127 extent_limit None deps:[JobId(1126), JobId(1125)] res:false f:52 g:1
52384 Sep 22 23:22:54.915 WARN returning error on flush!
52385 Sep 22 23:22:54.915 DEBG Flush :1127 extent_limit None deps:[JobId(1126), JobId(1125)] res:false f:52 g:1
52386 Sep 22 23:22:54.915 INFO [lossy] skipping 1127
52387 Sep 22 23:22:54.915 WARN returning error on flush!
52388 Sep 22 23:22:54.915 DEBG Flush :1127 extent_limit None deps:[JobId(1126), JobId(1125)] res:false f:52 g:1
52389 Sep 22 23:22:54.915 DEBG Flush :1127 extent_limit None deps:[JobId(1126), JobId(1125)] res:true f:52 g:1
52390 Sep 22 23:22:54.921 DEBG Read :1128 deps:[JobId(1127)] res:true
52391 Sep 22 23:22:55.320 ERRO [2] job id 1127 saw error GenericError("test error")
52392 Sep 22 23:22:55.320 ERRO [2] job id 1127 saw error GenericError("test error")
52393 Sep 22 23:22:55.320 ERRO [2] job id 1127 saw error GenericError("test error")
52394 Sep 22 23:22:55.320 DEBG up_ds_listen was notified
52395 Sep 22 23:22:55.320 DEBG up_ds_listen process 1127
52396 Sep 22 23:22:55.320 DEBG [A] ack job 1127:128, : downstairs
52397 Sep 22 23:22:55.320 DEBG up_ds_listen checked 1 jobs, back to waiting
52398 Sep 22 23:22:55.320 DEBG IO Flush 1129 has deps [JobId(1128), JobId(1127)]
52399 Sep 22 23:22:55.323 WARN returning error on flush!
52400 Sep 22 23:22:55.323 DEBG Flush :1129 extent_limit None deps:[JobId(1128), JobId(1127)] res:false f:53 g:1
52401 Sep 22 23:22:55.323 DEBG Flush :1129 extent_limit None deps:[JobId(1128), JobId(1127)] res:true f:53 g:1
52402 Sep 22 23:22:55.323 INFO [lossy] sleeping 1 second
52403 Sep 22 23:22:55.699 DEBG [2] Read AckReady 1128, : downstairs
52404 Sep 22 23:22:55.700 ERRO [2] job id 1129 saw error GenericError("test error")
52405 Sep 22 23:22:55.700 DEBG up_ds_listen was notified
52406 Sep 22 23:22:55.700 DEBG up_ds_listen process 1128
52407 Sep 22 23:22:55.700 DEBG [A] ack job 1128:129, : downstairs
52408 Sep 22 23:22:55.753 DEBG up_ds_listen checked 1 jobs, back to waiting
52409 Sep 22 23:22:55.754 DEBG IO Read 1130 has deps [JobId(1129)]
52410 Sep 22 23:22:55.821 DEBG IO Flush 1131 has deps [JobId(1130), JobId(1129)]
52411 Sep 22 23:22:55.907 DEBG Read :1126 deps:[JobId(1125)] res:true
52412 Sep 22 23:22:55.935 DEBG Read :1128 deps:[JobId(1127)] res:true
52413 Sep 22 23:22:55.958 DEBG Flush :1127 extent_limit None deps:[JobId(1126), JobId(1125)] res:true f:52 g:1
52414 Sep 22 23:22:55.964 DEBG Read :1128 deps:[JobId(1127)] res:true
52415 Sep 22 23:22:56.035 DEBG Flush :1129 extent_limit None deps:[JobId(1128), JobId(1127)] res:true f:53 g:1
52416 Sep 22 23:22:56.040 DEBG Read :1130 deps:[JobId(1129)] res:true
52417 Sep 22 23:22:56.393 DEBG [rc] retire 1127 clears [JobId(1126), JobId(1127)], : downstairs
52418 Sep 22 23:22:56.446 DEBG Read :1130 deps:[JobId(1129)] res:true
52419 Sep 22 23:22:56.798 DEBG up_ds_listen was notified
52420 Sep 22 23:22:56.798 DEBG up_ds_listen process 1129
52421 Sep 22 23:22:56.798 DEBG [A] ack job 1129:130, : downstairs
52422 Sep 22 23:22:56.798 DEBG up_ds_listen checked 1 jobs, back to waiting
52423 Sep 22 23:22:56.802 INFO [lossy] skipping 1129
52424 Sep 22 23:22:56.802 INFO [lossy] skipping 1130
52425 Sep 22 23:22:56.802 INFO [lossy] skipping 1131
52426 Sep 22 23:22:56.802 DEBG Flush :1129 extent_limit None deps:[JobId(1128), JobId(1127)] res:true f:53 g:1
52427 Sep 22 23:22:56.808 DEBG Read :1130 deps:[JobId(1129)] res:true
52428 Sep 22 23:22:56.878 DEBG Flush :1131 extent_limit None deps:[JobId(1130), JobId(1129)] res:true f:54 g:1
52429 Sep 22 23:22:56.878 INFO [lossy] sleeping 1 second
52430 Sep 22 23:22:57.208 DEBG [rc] retire 1129 clears [JobId(1128), JobId(1129)], : downstairs
52431 Sep 22 23:22:57.256 DEBG Flush :1131 extent_limit None deps:[JobId(1130), JobId(1129)] res:true f:54 g:1
52432 Sep 22 23:22:57.584 DEBG [1] Read AckReady 1130, : downstairs
52433 Sep 22 23:22:57.585 DEBG up_ds_listen was notified
52434 Sep 22 23:22:57.585 DEBG up_ds_listen process 1130
52435 Sep 22 23:22:57.585 DEBG [A] ack job 1130:131, : downstairs
52436 Sep 22 23:22:57.638 DEBG up_ds_listen checked 1 jobs, back to waiting
52437 Sep 22 23:22:58.017 DEBG up_ds_listen was notified
52438 Sep 22 23:22:58.017 DEBG up_ds_listen process 1131
52439 Sep 22 23:22:58.017 DEBG [A] ack job 1131:132, : downstairs
52440 Sep 22 23:22:58.017 DEBG up_ds_listen checked 1 jobs, back to waiting
52441 Sep 22 23:22:58.017 DEBG IO Read 1132 has deps [JobId(1131)]
52442 Sep 22 23:22:58.017 INFO [lossy] sleeping 1 second
52443 Sep 22 23:22:58.035 DEBG Read :1132 deps:[JobId(1131)] res:true
52444 Sep 22 23:22:58.059 DEBG Flush :1131 extent_limit None deps:[JobId(1130), JobId(1129)] res:true f:54 g:1
52445 Sep 22 23:22:58.064 DEBG Read :1132 deps:[JobId(1131)] res:true
52446 Sep 22 23:22:58.464 DEBG [rc] retire 1131 clears [JobId(1130), JobId(1131)], : downstairs
52447 Sep 22 23:22:58.841 DEBG [2] Read AckReady 1132, : downstairs
52448 Sep 22 23:22:58.842 DEBG up_ds_listen was notified
52449 Sep 22 23:22:58.842 DEBG up_ds_listen process 1132
52450 Sep 22 23:22:58.842 DEBG [A] ack job 1132:133, : downstairs
52451 Sep 22 23:22:58.895 DEBG up_ds_listen checked 1 jobs, back to waiting
52452 Sep 22 23:22:58.896 DEBG IO Flush 1133 has deps [JobId(1132)]
52453 Sep 22 23:22:58.897 DEBG IO Read 1134 has deps [JobId(1133)]
52454 Sep 22 23:22:58.902 DEBG Flush :1133 extent_limit None deps:[JobId(1132)] res:true f:55 g:1
52455 Sep 22 23:22:58.909 WARN returning error on read!
52456 Sep 22 23:22:58.909 DEBG Read :1134 deps:[JobId(1133)] res:false
52457 Sep 22 23:22:58.915 DEBG Read :1134 deps:[JobId(1133)] res:true
52458 Sep 22 23:22:58.936 ERRO [2] job id 1134 saw error GenericError("test error")
52459 Sep 22 23:22:58.938 INFO [lossy] skipping 1133
52460 Sep 22 23:22:58.938 INFO [lossy] skipping 1134
52461 Sep 22 23:22:58.938 INFO [lossy] skipping 1133
52462 Sep 22 23:22:58.938 WARN returning error on flush!
52463 Sep 22 23:22:58.938 DEBG Flush :1133 extent_limit None deps:[JobId(1132)] res:false f:55 g:1
52464 Sep 22 23:22:58.938 DEBG Flush :1133 extent_limit None deps:[JobId(1132)] res:true f:55 g:1
52465 Sep 22 23:22:58.943 DEBG Read :1134 deps:[JobId(1133)] res:true
52466 Sep 22 23:22:59.343 ERRO [0] job id 1133 saw error GenericError("test error")
52467 Sep 22 23:22:59.343 DEBG up_ds_listen was notified
52468 Sep 22 23:22:59.343 DEBG up_ds_listen process 1133
52469 Sep 22 23:22:59.343 DEBG [A] ack job 1133:134, : downstairs
52470 Sep 22 23:22:59.343 DEBG up_ds_listen checked 1 jobs, back to waiting
52471 Sep 22 23:22:59.344 INFO [lossy] skipping 1132
52472 Sep 22 23:22:59.344 INFO [lossy] skipping 1134
52473 Sep 22 23:22:59.349 DEBG Read :1132 deps:[JobId(1131)] res:true
52474 Sep 22 23:22:59.353 DEBG [0] Read AckReady 1000, : downstairs
52475 Sep 22 23:22:59.749 DEBG [2] Read AckReady 1134, : downstairs
52476 Sep 22 23:22:59.750 DEBG up_ds_listen was notified
52477 Sep 22 23:22:59.750 DEBG up_ds_listen process 1134
52478 Sep 22 23:22:59.750 DEBG [A] ack job 1134:135, : downstairs
52479 Sep 22 23:22:59.802 DEBG up_ds_listen checked 1 jobs, back to waiting
52480 Sep 22 23:22:59.804 DEBG IO Flush 1135 has deps [JobId(1134), JobId(1133)]
52481 Sep 22 23:22:59.805 DEBG IO Read 1136 has deps [JobId(1135)]
52482 Sep 22 23:22:59.810 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:true f:56 g:1
52483 Sep 22 23:22:59.823 DEBG Read :1136 deps:[JobId(1135)] res:true
52484 Sep 22 23:22:59.845 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:true f:56 g:1
52485 Sep 22 23:22:59.845 INFO [lossy] skipping 1136
52486 Sep 22 23:22:59.846 WARN returning error on read!
52487 Sep 22 23:22:59.846 DEBG Read :1136 deps:[JobId(1135)] res:false
52488 Sep 22 23:22:59.846 INFO [lossy] skipping 1136
52489 Sep 22 23:22:59.846 WARN returning error on read!
52490 Sep 22 23:22:59.846 DEBG Read :1136 deps:[JobId(1135)] res:false
52491 Sep 22 23:22:59.846 INFO [lossy] skipping 1136
52492 Sep 22 23:22:59.846 INFO [lossy] skipping 1136
52493 Sep 22 23:22:59.851 DEBG Read :1136 deps:[JobId(1135)] res:true
52494 Sep 22 23:23:00.251 ERRO [0] job id 1136 saw error GenericError("test error")
52495 Sep 22 23:23:00.251 ERRO [0] job id 1136 saw error GenericError("test error")
52496 Sep 22 23:23:00.252 DEBG up_ds_listen was notified
52497 Sep 22 23:23:00.252 DEBG up_ds_listen process 1135
52498 Sep 22 23:23:00.252 DEBG [A] ack job 1135:136, : downstairs
52499 Sep 22 23:23:00.252 DEBG up_ds_listen checked 1 jobs, back to waiting
52500 Sep 22 23:23:00.253 INFO [lossy] skipping 1134
52501 Sep 22 23:23:00.253 INFO [lossy] skipping 1134
52502 Sep 22 23:23:00.253 INFO [lossy] skipping 1133
52503 Sep 22 23:23:00.253 INFO [lossy] skipping 1134
52504 Sep 22 23:23:00.253 INFO [lossy] skipping 1133
52505 Sep 22 23:23:00.253 DEBG Flush :1133 extent_limit None deps:[JobId(1132)] res:true f:55 g:1
52506 Sep 22 23:23:00.253 INFO [lossy] skipping 1134
52507 Sep 22 23:23:00.253 WARN 1135 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
52508 Sep 22 23:23:00.253 INFO [lossy] skipping 1136
52509 Sep 22 23:23:00.259 DEBG Read :1134 deps:[JobId(1133)] res:true
52510 Sep 22 23:23:00.658 DEBG [rc] retire 1133 clears [JobId(1132), JobId(1133)], : downstairs
52511 Sep 22 23:23:00.659 DEBG IO Flush 1137 has deps [JobId(1136), JobId(1135)]
52512 Sep 22 23:23:00.662 INFO [lossy] skipping 1137
52513 Sep 22 23:23:00.662 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:true f:57 g:1
52514 Sep 22 23:23:01.038 DEBG [2] Read AckReady 1136, : downstairs
52515 Sep 22 23:23:01.039 DEBG up_ds_listen was notified
52516 Sep 22 23:23:01.039 DEBG up_ds_listen process 1136
52517 Sep 22 23:23:01.039 DEBG [A] ack job 1136:137, : downstairs
52518 Sep 22 23:23:01.092 DEBG up_ds_listen checked 1 jobs, back to waiting
52519 Sep 22 23:23:01.093 DEBG IO Read 1138 has deps [JobId(1137)]
52520 Sep 22 23:23:01.112 DEBG Read :1138 deps:[JobId(1137)] res:true
52521 Sep 22 23:23:01.135 WARN returning error on flush!
52522 Sep 22 23:23:01.135 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:false f:57 g:1
52523 Sep 22 23:23:01.135 INFO [lossy] skipping 1137
52524 Sep 22 23:23:01.135 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:true f:57 g:1
52525 Sep 22 23:23:01.135 INFO [lossy] sleeping 1 second
52526 Sep 22 23:23:01.136 INFO [lossy] sleeping 1 second
52527 Sep 22 23:23:01.563 ERRO [0] job id 1137 saw error GenericError("test error")
52528 Sep 22 23:23:01.894 DEBG up_ds_listen was notified
52529 Sep 22 23:23:01.894 DEBG up_ds_listen process 1137
52530 Sep 22 23:23:01.894 DEBG [A] ack job 1137:138, : downstairs
52531 Sep 22 23:23:01.894 DEBG up_ds_listen checked 1 jobs, back to waiting
52532 Sep 22 23:23:01.894 DEBG IO Flush 1139 has deps [JobId(1138), JobId(1137)]
52533 Sep 22 23:23:01.896 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:true f:58 g:1
52534 Sep 22 23:23:02.273 DEBG [2] Read AckReady 1138, : downstairs
52535 Sep 22 23:23:02.273 DEBG up_ds_listen was notified
52536 Sep 22 23:23:02.273 DEBG up_ds_listen process 1138
52537 Sep 22 23:23:02.274 DEBG [A] ack job 1138:139, : downstairs
52538 Sep 22 23:23:02.326 DEBG up_ds_listen checked 1 jobs, back to waiting
52539 Sep 22 23:23:02.333 DEBG Read :1138 deps:[JobId(1137)] res:true
52540 Sep 22 23:23:02.354 INFO [lossy] skipping 1135
52541 Sep 22 23:23:02.354 INFO [lossy] skipping 1135
52542 Sep 22 23:23:02.354 INFO [lossy] skipping 1135
52543 Sep 22 23:23:02.354 INFO [lossy] skipping 1135
52544 Sep 22 23:23:02.354 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:true f:56 g:1
52545 Sep 22 23:23:02.354 INFO [lossy] skipping 1136
52546 Sep 22 23:23:02.354 INFO [lossy] skipping 1137
52547 Sep 22 23:23:02.354 INFO [lossy] skipping 1139
52548 Sep 22 23:23:02.360 DEBG Read :1136 deps:[JobId(1135)] res:true
52549 Sep 22 23:23:02.382 DEBG IO Read 1140 has deps [JobId(1139)]
52550 Sep 22 23:23:02.382 DEBG [rc] retire 1135 clears [JobId(1134), JobId(1135)], : downstairs
52551 Sep 22 23:23:02.394 INFO [lossy] sleeping 1 second
52552 Sep 22 23:23:02.395 DEBG IO Flush 1141 has deps [JobId(1140), JobId(1139)]
52553 Sep 22 23:23:02.397 INFO [lossy] skipping 1139
52554 Sep 22 23:23:02.397 WARN returning error on flush!
52555 Sep 22 23:23:02.397 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:false f:58 g:1
52556 Sep 22 23:23:02.397 INFO [lossy] skipping 1139
52557 Sep 22 23:23:02.397 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:true f:58 g:1
52558 Sep 22 23:23:02.397 INFO [lossy] skipping 1140
52559 Sep 22 23:23:02.397 INFO [lossy] skipping 1141
52560 Sep 22 23:23:02.397 INFO [lossy] skipping 1140
52561 Sep 22 23:23:02.397 INFO [lossy] skipping 1140
52562 Sep 22 23:23:02.403 DEBG Read :1140 deps:[JobId(1139)] res:true
52563 Sep 22 23:23:02.425 INFO [lossy] skipping 1137
52564 Sep 22 23:23:02.425 INFO [lossy] skipping 1139
52565 Sep 22 23:23:02.425 WARN returning error on flush!
52566 Sep 22 23:23:02.425 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:false f:57 g:1
52567 Sep 22 23:23:02.425 INFO [lossy] skipping 1139
52568 Sep 22 23:23:02.425 INFO [lossy] skipping 1137
52569 Sep 22 23:23:02.425 WARN returning error on flush!
52570 Sep 22 23:23:02.425 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:false f:57 g:1
52571 Sep 22 23:23:02.425 WARN returning error on flush!
52572 Sep 22 23:23:02.425 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:false f:57 g:1
52573 Sep 22 23:23:02.425 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:true f:57 g:1
52574 Sep 22 23:23:02.431 DEBG Read :1138 deps:[JobId(1137)] res:true
52575 Sep 22 23:23:02.879 ERRO [0] job id 1139 saw error GenericError("test error")
52576 Sep 22 23:23:03.209 ERRO [1] job id 1137 saw error GenericError("test error")
52577 Sep 22 23:23:03.209 ERRO [1] job id 1137 saw error GenericError("test error")
52578 Sep 22 23:23:03.209 ERRO [1] job id 1137 saw error GenericError("test error")
52579 Sep 22 23:23:03.210 DEBG [rc] retire 1137 clears [JobId(1136), JobId(1137)], : downstairs
52580 Sep 22 23:23:03.210 DEBG up_ds_listen was notified
52581 Sep 22 23:23:03.210 DEBG up_ds_listen process 1139
52582 Sep 22 23:23:03.210 DEBG [A] ack job 1139:140, : downstairs
52583 Sep 22 23:23:03.210 DEBG up_ds_listen checked 1 jobs, back to waiting
52584 Sep 22 23:23:03.213 INFO [lossy] sleeping 1 second
52585 Sep 22 23:23:03.214 INFO [lossy] skipping 1139
52586 Sep 22 23:23:03.214 WARN returning error on flush!
52587 Sep 22 23:23:03.214 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:false f:58 g:1
52588 Sep 22 23:23:03.214 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:true f:58 g:1
52589 Sep 22 23:23:03.214 INFO [lossy] skipping 1140
52590 Sep 22 23:23:03.214 INFO [lossy] skipping 1141
52591 Sep 22 23:23:03.214 INFO [lossy] skipping 1140
52592 Sep 22 23:23:03.214 INFO [lossy] skipping 1141
52593 Sep 22 23:23:03.220 DEBG Read :1140 deps:[JobId(1139)] res:true
52594 Sep 22 23:23:03.665 DEBG [0] Read AckReady 1140, : downstairs
52595 Sep 22 23:23:03.995 ERRO [1] job id 1139 saw error GenericError("test error")
52596 Sep 22 23:23:03.996 DEBG [rc] retire 1139 clears [JobId(1138), JobId(1139)], : downstairs
52597 Sep 22 23:23:03.996 DEBG up_ds_listen was notified
52598 Sep 22 23:23:03.996 DEBG up_ds_listen process 1140
52599 Sep 22 23:23:03.996 DEBG [A] ack job 1140:141, : downstairs
52600 Sep 22 23:23:04.049 DEBG up_ds_listen checked 1 jobs, back to waiting
52601 Sep 22 23:23:04.056 DEBG Read :1140 deps:[JobId(1139)] res:true
52602 Sep 22 23:23:04.077 DEBG IO Read 1142 has deps [JobId(1141)]
52603 Sep 22 23:23:04.092 DEBG Flush :1141 extent_limit None deps:[JobId(1140), JobId(1139)] res:true f:59 g:1
52604 Sep 22 23:23:04.097 DEBG Read :1142 deps:[JobId(1141)] res:true
52605 Sep 22 23:23:04.119 INFO [lossy] skipping 1141
52606 Sep 22 23:23:04.119 DEBG Flush :1141 extent_limit None deps:[JobId(1140), JobId(1139)] res:true f:59 g:1
52607 Sep 22 23:23:04.119 INFO [lossy] skipping 1142
52608 Sep 22 23:23:04.124 DEBG Read :1142 deps:[JobId(1141)] res:true
52609 Sep 22 23:23:04.902 DEBG up_ds_listen was notified
52610 Sep 22 23:23:04.902 DEBG up_ds_listen process 1141
52611 Sep 22 23:23:04.902 DEBG [A] ack job 1141:142, : downstairs
52612 Sep 22 23:23:04.902 DEBG up_ds_listen checked 1 jobs, back to waiting
52613 Sep 22 23:23:04.902 DEBG Flush :1141 extent_limit None deps:[JobId(1140), JobId(1139)] res:true f:59 g:1
52614 Sep 22 23:23:04.902 INFO [lossy] skipping 1142
52615 Sep 22 23:23:04.908 DEBG Read :1142 deps:[JobId(1141)] res:true
52616 Sep 22 23:23:04.929 DEBG IO Flush 1143 has deps [JobId(1142), JobId(1141)]
52617 Sep 22 23:23:04.930 DEBG [rc] retire 1141 clears [JobId(1140), JobId(1141)], : downstairs
52618 Sep 22 23:23:04.932 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:true f:60 g:1
52619 Sep 22 23:23:04.932 INFO [lossy] sleeping 1 second
52620 Sep 22 23:23:04.934 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:true f:60 g:1
52621 Sep 22 23:23:05.358 DEBG [1] Read AckReady 1142, : downstairs
52622 Sep 22 23:23:05.688 DEBG [2] Read already AckReady 1142, : downstairs
52623 Sep 22 23:23:05.689 DEBG up_ds_listen was notified
52624 Sep 22 23:23:05.689 DEBG up_ds_listen process 1142
52625 Sep 22 23:23:05.690 DEBG [A] ack job 1142:143, : downstairs
52626 Sep 22 23:23:05.742 DEBG up_ds_listen process 1143
52627 Sep 22 23:23:05.742 DEBG [A] ack job 1143:144, : downstairs
52628 Sep 22 23:23:05.743 DEBG up_ds_listen checked 2 jobs, back to waiting
52629 Sep 22 23:23:05.743 DEBG up_ds_listen was notified
52630 Sep 22 23:23:05.743 DEBG up_ds_listen checked 0 jobs, back to waiting
52631 Sep 22 23:23:05.745 INFO [lossy] sleeping 1 second
52632 Sep 22 23:23:05.792 DEBG IO Read 1144 has deps [JobId(1143)]
52633 Sep 22 23:23:06.135 INFO [lossy] skipping 1144
52634 Sep 22 23:23:06.135 INFO [lossy] skipping 1144
52635 Sep 22 23:23:06.135 WARN returning error on read!
52636 Sep 22 23:23:06.135 DEBG Read :1144 deps:[JobId(1143)] res:false
52637 Sep 22 23:23:06.141 DEBG Read :1144 deps:[JobId(1143)] res:true
52638 Sep 22 23:23:06.162 INFO [lossy] sleeping 1 second
52639 Sep 22 23:23:06.162 ERRO [1] job id 1144 saw error GenericError("test error")
52640 Sep 22 23:23:06.540 DEBG [1] Read AckReady 1144, : downstairs
52641 Sep 22 23:23:06.541 DEBG up_ds_listen was notified
52642 Sep 22 23:23:06.541 DEBG up_ds_listen process 1144
52643 Sep 22 23:23:06.541 DEBG [A] ack job 1144:145, : downstairs
52644 Sep 22 23:23:06.593 DEBG up_ds_listen checked 1 jobs, back to waiting
52645 Sep 22 23:23:06.594 DEBG IO Flush 1145 has deps [JobId(1144), JobId(1143)]
52646 Sep 22 23:23:06.595 DEBG IO Read 1146 has deps [JobId(1145)]
52647 Sep 22 23:23:06.601 INFO [lossy] skipping 1145
52648 Sep 22 23:23:06.601 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:true f:61 g:1
52649 Sep 22 23:23:06.613 DEBG Read :1146 deps:[JobId(1145)] res:true
52650 Sep 22 23:23:07.011 DEBG [1] Read AckReady 1146, : downstairs
52651 Sep 22 23:23:07.012 DEBG up_ds_listen was notified
52652 Sep 22 23:23:07.012 DEBG up_ds_listen process 1146
52653 Sep 22 23:23:07.012 DEBG [A] ack job 1146:147, : downstairs
52654 Sep 22 23:23:07.065 DEBG up_ds_listen checked 1 jobs, back to waiting
52655 Sep 22 23:23:07.066 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:true f:60 g:1
52656 Sep 22 23:23:07.072 DEBG Read :1144 deps:[JobId(1143)] res:true
52657 Sep 22 23:23:07.094 DEBG IO Read 1147 has deps [JobId(1145)]
52658 Sep 22 23:23:07.094 DEBG [rc] retire 1143 clears [JobId(1142), JobId(1143)], : downstairs
52659 Sep 22 23:23:07.106 DEBG IO Flush 1148 has deps [JobId(1147), JobId(1146), JobId(1145)]
52660 Sep 22 23:23:07.106 INFO [lossy] sleeping 1 second
52661 Sep 22 23:23:07.108 WARN returning error on flush!
52662 Sep 22 23:23:07.108 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:false f:61 g:1
52663 Sep 22 23:23:07.108 INFO [lossy] skipping 1145
52664 Sep 22 23:23:07.108 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:true f:61 g:1
52665 Sep 22 23:23:07.108 WARN returning error on read!
52666 Sep 22 23:23:07.108 DEBG Read :1146 deps:[JobId(1145)] res:false
52667 Sep 22 23:23:07.108 WARN returning error on read!
52668 Sep 22 23:23:07.108 DEBG Read :1147 deps:[JobId(1145)] res:false
52669 Sep 22 23:23:07.114 DEBG Read :1146 deps:[JobId(1145)] res:true
52670 Sep 22 23:23:07.513 ERRO [0] job id 1145 saw error GenericError("test error")
52671 Sep 22 23:23:07.513 ERRO [0] job id 1146 saw error GenericError("test error")
52672 Sep 22 23:23:07.513 ERRO [0] job id 1147 saw error GenericError("test error")
52673 Sep 22 23:23:07.513 DEBG up_ds_listen was notified
52674 Sep 22 23:23:07.513 DEBG up_ds_listen process 1145
52675 Sep 22 23:23:07.513 DEBG [A] ack job 1145:146, : downstairs
52676 Sep 22 23:23:07.513 DEBG up_ds_listen checked 1 jobs, back to waiting
52677 Sep 22 23:23:07.513 WARN returning error on read!
52678 Sep 22 23:23:07.513 DEBG Read :1144 deps:[JobId(1143)] res:false
52679 Sep 22 23:23:07.513 INFO [lossy] skipping 1147
52680 Sep 22 23:23:07.519 DEBG Read :1144 deps:[JobId(1143)] res:true
52681 Sep 22 23:23:07.541 ERRO [2] job id 1144 saw error GenericError("test error")
52682 Sep 22 23:23:07.543 WARN returning error on read!
52683 Sep 22 23:23:07.543 DEBG Read :1147 deps:[JobId(1145)] res:false
52684 Sep 22 23:23:07.549 DEBG Read :1147 deps:[JobId(1145)] res:true
52685 Sep 22 23:23:07.949 ERRO [0] job id 1147 saw error GenericError("test error")
52686 Sep 22 23:23:07.950 INFO [lossy] sleeping 1 second
52687 Sep 22 23:23:08.333 DEBG Read :1147 deps:[JobId(1145)] res:true
52688 Sep 22 23:23:08.357 INFO [lossy] sleeping 1 second
52689 Sep 22 23:23:08.732 DEBG [0] Read AckReady 1147, : downstairs
52690 Sep 22 23:23:08.733 DEBG up_ds_listen was notified
52691 Sep 22 23:23:08.733 DEBG up_ds_listen process 1147
52692 Sep 22 23:23:08.733 DEBG [A] ack job 1147:148, : downstairs
52693 Sep 22 23:23:08.786 DEBG up_ds_listen checked 1 jobs, back to waiting
52694 Sep 22 23:23:08.788 DEBG Flush :1148 extent_limit None deps:[JobId(1147), JobId(1146), JobId(1145)] res:true f:62 g:1
52695 Sep 22 23:23:08.836 DEBG IO Read 1149 has deps [JobId(1148)]
52696 Sep 22 23:23:09.179 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:true f:61 g:1
52697 Sep 22 23:23:09.184 DEBG Read :1146 deps:[JobId(1145)] res:true
52698 Sep 22 23:23:09.206 INFO [lossy] sleeping 1 second
52699 Sep 22 23:23:09.206 DEBG [rc] retire 1145 clears [JobId(1144), JobId(1145)], : downstairs
52700 Sep 22 23:23:09.213 DEBG Read :1147 deps:[JobId(1145)] res:true
52701 Sep 22 23:23:09.613 DEBG IO Flush 1150 has deps [JobId(1149), JobId(1148)]
52702 Sep 22 23:23:09.613 INFO [lossy] skipping 1148
52703 Sep 22 23:23:09.613 DEBG Flush :1148 extent_limit None deps:[JobId(1147), JobId(1146), JobId(1145)] res:true f:62 g:1
52704 Sep 22 23:23:09.613 INFO [lossy] skipping 1149
52705 Sep 22 23:23:09.619 DEBG Read :1149 deps:[JobId(1148)] res:true
52706 Sep 22 23:23:09.641 DEBG up_ds_listen was notified
52707 Sep 22 23:23:09.641 DEBG up_ds_listen process 1148
52708 Sep 22 23:23:09.641 DEBG [A] ack job 1148:149, : downstairs
52709 Sep 22 23:23:09.641 DEBG up_ds_listen checked 1 jobs, back to waiting
52710 Sep 22 23:23:09.643 DEBG Flush :1148 extent_limit None deps:[JobId(1147), JobId(1146), JobId(1145)] res:true f:62 g:1
52711 Sep 22 23:23:09.643 WARN returning error on read!
52712 Sep 22 23:23:09.643 DEBG Read :1149 deps:[JobId(1148)] res:false
52713 Sep 22 23:23:09.649 DEBG Read :1149 deps:[JobId(1148)] res:true
52714 Sep 22 23:23:10.049 DEBG [rc] retire 1148 clears [JobId(1146), JobId(1147), JobId(1148)], : downstairs
52715 Sep 22 23:23:10.049 ERRO [2] job id 1149 saw error GenericError("test error")
52716 Sep 22 23:23:10.051 INFO [lossy] skipping 1150
52717 Sep 22 23:23:10.051 DEBG Flush :1150 extent_limit None deps:[JobId(1149), JobId(1148)] res:true f:63 g:1
52718 Sep 22 23:23:10.051 INFO [lossy] sleeping 1 second
52719 Sep 22 23:23:10.428 DEBG [0] Read AckReady 1149, : downstairs
52720 Sep 22 23:23:10.428 DEBG up_ds_listen was notified
52721 Sep 22 23:23:10.428 DEBG up_ds_listen process 1149
52722 Sep 22 23:23:10.428 DEBG [A] ack job 1149:150, : downstairs
52723 Sep 22 23:23:10.481 DEBG up_ds_listen checked 1 jobs, back to waiting
52724 Sep 22 23:23:10.483 INFO [lossy] skipping 1149
52725 Sep 22 23:23:10.489 DEBG Read :1149 deps:[JobId(1148)] res:true
52726 Sep 22 23:23:10.510 DEBG IO Read 1151 has deps [JobId(1150)]
52727 Sep 22 23:23:10.524 DEBG Flush :1150 extent_limit None deps:[JobId(1149), JobId(1148)] res:true f:63 g:1
52728 Sep 22 23:23:10.530 DEBG Read :1151 deps:[JobId(1150)] res:true
52729 Sep 22 23:23:10.930 DEBG up_ds_listen was notified
52730 Sep 22 23:23:10.930 DEBG up_ds_listen process 1150
52731 Sep 22 23:23:10.930 DEBG [A] ack job 1150:151, : downstairs
52732 Sep 22 23:23:10.930 DEBG up_ds_listen checked 1 jobs, back to waiting
52733 Sep 22 23:23:10.932 DEBG Flush :1150 extent_limit None deps:[JobId(1149), JobId(1148)] res:true f:63 g:1
52734 Sep 22 23:23:10.937 DEBG Read :1151 deps:[JobId(1150)] res:true
52735 Sep 22 23:23:11.338 DEBG [rc] retire 1150 clears [JobId(1149), JobId(1150)], : downstairs
52736 Sep 22 23:23:11.338 DEBG IO Flush 1152 has deps [JobId(1151)]
52737 Sep 22 23:23:11.338 WARN returning error on read!
52738 Sep 22 23:23:11.338 DEBG Read :1151 deps:[JobId(1150)] res:false
52739 Sep 22 23:23:11.338 WARN returning error on read!
52740 Sep 22 23:23:11.338 DEBG Read :1151 deps:[JobId(1150)] res:false
52741 Sep 22 23:23:11.338 INFO [lossy] skipping 1151
52742 Sep 22 23:23:11.338 WARN returning error on read!
52743 Sep 22 23:23:11.338 DEBG Read :1151 deps:[JobId(1150)] res:false
52744 Sep 22 23:23:11.339 WARN returning error on read!
52745 Sep 22 23:23:11.339 DEBG Read :1151 deps:[JobId(1150)] res:false
52746 Sep 22 23:23:11.339 INFO [lossy] skipping 1151
52747 Sep 22 23:23:11.339 WARN returning error on read!
52748 Sep 22 23:23:11.339 DEBG Read :1151 deps:[JobId(1150)] res:false
52749 Sep 22 23:23:11.339 INFO [lossy] skipping 1151
52750 Sep 22 23:23:11.344 DEBG Read :1151 deps:[JobId(1150)] res:true
52751 Sep 22 23:23:11.366 ERRO [0] job id 1151 saw error GenericError("test error")
52752 Sep 22 23:23:11.366 ERRO [0] job id 1151 saw error GenericError("test error")
52753 Sep 22 23:23:11.366 ERRO [0] job id 1151 saw error GenericError("test error")
52754 Sep 22 23:23:11.366 ERRO [0] job id 1151 saw error GenericError("test error")
52755 Sep 22 23:23:11.366 ERRO [0] job id 1151 saw error GenericError("test error")
52756 Sep 22 23:23:11.369 INFO [lossy] sleeping 1 second
52757 Sep 22 23:23:11.746 DEBG [2] Read AckReady 1151, : downstairs
52758 Sep 22 23:23:11.747 DEBG up_ds_listen was notified
52759 Sep 22 23:23:11.747 DEBG up_ds_listen process 1151
52760 Sep 22 23:23:11.747 DEBG [A] ack job 1151:152, : downstairs
52761 Sep 22 23:23:11.799 DEBG up_ds_listen checked 1 jobs, back to waiting
52762 Sep 22 23:23:11.802 INFO [lossy] sleeping 1 second
52763 Sep 22 23:23:11.849 DEBG IO Read 1153 has deps [JobId(1152)]
52764 Sep 22 23:23:12.187 INFO [lossy] sleeping 1 second
52765 Sep 22 23:23:12.194 DEBG IO Flush 1154 has deps [JobId(1153), JobId(1152)]
52766 Sep 22 23:23:12.571 INFO [lossy] skipping 1152
52767 Sep 22 23:23:12.571 INFO [lossy] skipping 1154
52768 Sep 22 23:23:12.571 INFO [lossy] skipping 1152
52769 Sep 22 23:23:12.571 INFO [lossy] skipping 1154
52770 Sep 22 23:23:12.571 DEBG Flush :1152 extent_limit None deps:[JobId(1151)] res:true f:64 g:1
52771 Sep 22 23:23:12.571 INFO [lossy] skipping 1153
52772 Sep 22 23:23:12.572 WARN returning error on read!
52773 Sep 22 23:23:12.572 DEBG Read :1153 deps:[JobId(1152)] res:false
52774 Sep 22 23:23:12.572 INFO [lossy] skipping 1153
52775 Sep 22 23:23:12.572 INFO [lossy] skipping 1153
52776 Sep 22 23:23:12.577 DEBG Read :1153 deps:[JobId(1152)] res:true
52777 Sep 22 23:23:12.599 ERRO [2] job id 1153 saw error GenericError("test error")
52778 Sep 22 23:23:12.600 WARN returning error on flush!
52779 Sep 22 23:23:12.600 DEBG Flush :1154 extent_limit None deps:[JobId(1153), JobId(1152)] res:false f:65 g:1
52780 Sep 22 23:23:12.600 INFO [lossy] skipping 1154
52781 Sep 22 23:23:12.600 DEBG Flush :1154 extent_limit None deps:[JobId(1153), JobId(1152)] res:true f:65 g:1
52782 Sep 22 23:23:12.600 INFO [lossy] sleeping 1 second
52783 Sep 22 23:23:12.976 DEBG [2] Read AckReady 1153, : downstairs
52784 Sep 22 23:23:12.977 ERRO [2] job id 1154 saw error GenericError("test error")
52785 Sep 22 23:23:12.977 DEBG up_ds_listen was notified
52786 Sep 22 23:23:12.977 DEBG up_ds_listen process 1153
52787 Sep 22 23:23:12.977 DEBG [A] ack job 1153:154, : downstairs
52788 Sep 22 23:23:13.030 DEBG up_ds_listen checked 1 jobs, back to waiting
52789 Sep 22 23:23:13.031 DEBG Flush :1152 extent_limit None deps:[JobId(1151)] res:true f:64 g:1
52790 Sep 22 23:23:13.037 DEBG Read :1153 deps:[JobId(1152)] res:true
52791 Sep 22 23:23:13.058 DEBG IO Read 1155 has deps [JobId(1154)]
52792 Sep 22 23:23:13.064 DEBG up_ds_listen was notified
52793 Sep 22 23:23:13.064 DEBG up_ds_listen process 1152
52794 Sep 22 23:23:13.064 DEBG [A] ack job 1152:153, : downstairs
52795 Sep 22 23:23:13.064 DEBG up_ds_listen checked 1 jobs, back to waiting
52796 Sep 22 23:23:13.072 INFO [lossy] skipping 1154
52797 Sep 22 23:23:13.072 DEBG Flush :1154 extent_limit None deps:[JobId(1153), JobId(1152)] res:true f:65 g:1
52798 Sep 22 23:23:13.072 INFO [lossy] sleeping 1 second
52799 Sep 22 23:23:13.448 DEBG up_ds_listen was notified
52800 Sep 22 23:23:13.448 DEBG up_ds_listen process 1154
52801 Sep 22 23:23:13.448 DEBG [A] ack job 1154:155, : downstairs
52802 Sep 22 23:23:13.448 DEBG up_ds_listen checked 1 jobs, back to waiting
52803 Sep 22 23:23:13.449 INFO [lossy] skipping 1152
52804 Sep 22 23:23:13.449 INFO [lossy] skipping 1154
52805 Sep 22 23:23:13.449 DEBG Flush :1152 extent_limit None deps:[JobId(1151)] res:true f:64 g:1
52806 Sep 22 23:23:13.449 INFO [lossy] skipping 1153
52807 Sep 22 23:23:13.449 INFO [lossy] skipping 1155
52808 Sep 22 23:23:13.455 DEBG Read :1153 deps:[JobId(1152)] res:true
52809 Sep 22 23:23:13.476 DEBG [rc] retire 1152 clears [JobId(1151), JobId(1152)], : downstairs
52810 Sep 22 23:23:13.477 INFO [lossy] skipping 1155
52811 Sep 22 23:23:13.477 INFO [lossy] skipping 1155
52812 Sep 22 23:23:13.477 DEBG Flush :1154 extent_limit None deps:[JobId(1153), JobId(1152)] res:true f:65 g:1
52813 Sep 22 23:23:13.483 DEBG Read :1155 deps:[JobId(1154)] res:true
52814 Sep 22 23:23:13.882 DEBG [rc] retire 1154 clears [JobId(1153), JobId(1154)], : downstairs
52815 Sep 22 23:23:13.883 DEBG IO Flush 1156 has deps [JobId(1155)]
52816 Sep 22 23:23:13.883 INFO [lossy] skipping 1155
52817 Sep 22 23:23:13.888 DEBG Read :1155 deps:[JobId(1154)] res:true
52818 Sep 22 23:23:13.912 INFO [lossy] sleeping 1 second
52819 Sep 22 23:23:14.288 DEBG [0] Read AckReady 1155, : downstairs
52820 Sep 22 23:23:14.289 DEBG up_ds_listen was notified
52821 Sep 22 23:23:14.289 DEBG up_ds_listen process 1155
52822 Sep 22 23:23:14.289 DEBG [A] ack job 1155:156, : downstairs
52823 Sep 22 23:23:14.342 DEBG up_ds_listen checked 1 jobs, back to waiting
52824 Sep 22 23:23:14.344 DEBG Flush :1156 extent_limit None deps:[JobId(1155)] res:true f:66 g:1
52825 Sep 22 23:23:14.344 INFO [lossy] skipping 1155
52826 Sep 22 23:23:14.344 WARN returning error on read!
52827 Sep 22 23:23:14.344 DEBG Read :1155 deps:[JobId(1154)] res:false
52828 Sep 22 23:23:14.350 DEBG Read :1155 deps:[JobId(1154)] res:true
52829 Sep 22 23:23:14.371 INFO [lossy] sleeping 1 second
52830 Sep 22 23:23:14.419 DEBG IO Read 1157 has deps [JobId(1156)]
52831 Sep 22 23:23:14.420 ERRO [1] job id 1155 saw error GenericError("test error")
52832 Sep 22 23:23:14.762 DEBG IO Flush 1158 has deps [JobId(1157), JobId(1156)]
52833 Sep 22 23:23:14.765 INFO [lossy] sleeping 1 second
52834 Sep 22 23:23:14.849 DEBG [1] Read already AckReady 1000, : downstairs
52835 Sep 22 23:23:15.143 DEBG Flush :1156 extent_limit None deps:[JobId(1155)] res:true f:66 g:1
52836 Sep 22 23:23:15.143 INFO [lossy] skipping 1157
52837 Sep 22 23:23:15.143 INFO [lossy] skipping 1158
52838 Sep 22 23:23:15.149 DEBG Read :1157 deps:[JobId(1156)] res:true
52839 Sep 22 23:23:15.170 DEBG up_ds_listen was notified
52840 Sep 22 23:23:15.170 DEBG up_ds_listen process 1156
52841 Sep 22 23:23:15.170 DEBG [A] ack job 1156:157, : downstairs
52842 Sep 22 23:23:15.170 DEBG up_ds_listen checked 1 jobs, back to waiting
52843 Sep 22 23:23:15.171 DEBG Flush :1158 extent_limit None deps:[JobId(1157), JobId(1156)] res:true f:67 g:1
52844 Sep 22 23:23:15.172 INFO [lossy] sleeping 1 second
52845 Sep 22 23:23:15.547 DEBG [0] Read AckReady 1157, : downstairs
52846 Sep 22 23:23:15.547 DEBG up_ds_listen was notified
52847 Sep 22 23:23:15.547 DEBG up_ds_listen process 1157
52848 Sep 22 23:23:15.547 DEBG [A] ack job 1157:158, : downstairs
52849 Sep 22 23:23:15.600 DEBG up_ds_listen checked 1 jobs, back to waiting
52850 Sep 22 23:23:15.607 DEBG Read :1157 deps:[JobId(1156)] res:true
52851 Sep 22 23:23:15.628 DEBG IO Read 1159 has deps [JobId(1158)]
52852 Sep 22 23:23:15.642 INFO [lossy] skipping 1158
52853 Sep 22 23:23:15.642 DEBG Flush :1158 extent_limit None deps:[JobId(1157), JobId(1156)] res:true f:67 g:1
52854 Sep 22 23:23:15.647 DEBG Read :1159 deps:[JobId(1158)] res:true
52855 Sep 22 23:23:16.047 DEBG up_ds_listen was notified
52856 Sep 22 23:23:16.047 DEBG up_ds_listen process 1158
52857 Sep 22 23:23:16.047 DEBG [A] ack job 1158:159, : downstairs
52858 Sep 22 23:23:16.047 DEBG up_ds_listen checked 1 jobs, back to waiting
52859 Sep 22 23:23:16.047 DEBG Flush :1156 extent_limit None deps:[JobId(1155)] res:true f:66 g:1
52860 Sep 22 23:23:16.053 DEBG Read :1157 deps:[JobId(1156)] res:true
52861 Sep 22 23:23:16.075 DEBG [rc] retire 1156 clears [JobId(1155), JobId(1156)], : downstairs
52862 Sep 22 23:23:16.453 DEBG [2] Read AckReady 1159, : downstairs
52863 Sep 22 23:23:16.454 DEBG up_ds_listen was notified
52864 Sep 22 23:23:16.454 DEBG up_ds_listen process 1159
52865 Sep 22 23:23:16.454 DEBG [A] ack job 1159:160, : downstairs
52866 Sep 22 23:23:16.507 DEBG up_ds_listen checked 1 jobs, back to waiting
52867 Sep 22 23:23:16.509 DEBG Flush :1158 extent_limit None deps:[JobId(1157), JobId(1156)] res:true f:67 g:1
52868 Sep 22 23:23:16.509 WARN returning error on read!
52869 Sep 22 23:23:16.509 DEBG Read :1159 deps:[JobId(1158)] res:false
52870 Sep 22 23:23:16.509 INFO [lossy] skipping 1159
52871 Sep 22 23:23:16.509 INFO [lossy] skipping 1159
52872 Sep 22 23:23:16.515 DEBG Read :1159 deps:[JobId(1158)] res:true
52873 Sep 22 23:23:16.536 DEBG IO Flush 1160 has deps [JobId(1159), JobId(1158)]
52874 Sep 22 23:23:16.542 DEBG Read :1159 deps:[JobId(1158)] res:true
52875 Sep 22 23:23:16.612 DEBG IO Read 1161 has deps [JobId(1160)]
52876 Sep 22 23:23:16.942 DEBG [rc] retire 1158 clears [JobId(1157), JobId(1158)], : downstairs
52877 Sep 22 23:23:16.942 ERRO [1] job id 1159 saw error GenericError("test error")
52878 Sep 22 23:23:16.948 WARN returning error on flush!
52879 Sep 22 23:23:16.948 DEBG Flush :1160 extent_limit None deps:[JobId(1159), JobId(1158)] res:false f:68 g:1
52880 Sep 22 23:23:16.948 INFO [lossy] skipping 1160
52881 Sep 22 23:23:16.948 INFO [lossy] skipping 1160
52882 Sep 22 23:23:16.948 DEBG Flush :1160 extent_limit None deps:[JobId(1159), JobId(1158)] res:true f:68 g:1
52883 Sep 22 23:23:16.955 ERRO [2] job id 1160 saw error GenericError("test error")
52884 Sep 22 23:23:16.955 INFO [lossy] skipping 1161
52885 Sep 22 23:23:16.961 DEBG Read :1161 deps:[JobId(1160)] res:true
52886 Sep 22 23:23:16.985 INFO [lossy] sleeping 1 second
52887 Sep 22 23:23:16.986 DEBG Flush :1160 extent_limit None deps:[JobId(1159), JobId(1158)] res:true f:68 g:1
52888 Sep 22 23:23:16.992 DEBG Read :1161 deps:[JobId(1160)] res:true
52889 Sep 22 23:23:17.770 DEBG up_ds_listen was notified
52890 Sep 22 23:23:17.770 DEBG up_ds_listen process 1160
52891 Sep 22 23:23:17.770 DEBG [A] ack job 1160:161, : downstairs
52892 Sep 22 23:23:17.770 DEBG up_ds_listen checked 1 jobs, back to waiting
52893 Sep 22 23:23:17.772 DEBG IO Flush 1162 has deps [JobId(1161), JobId(1160)]
52894 Sep 22 23:23:18.148 DEBG [2] Read AckReady 1161, : downstairs
52895 Sep 22 23:23:18.149 DEBG up_ds_listen was notified
52896 Sep 22 23:23:18.149 DEBG up_ds_listen process 1161
52897 Sep 22 23:23:18.149 DEBG [A] ack job 1161:162, : downstairs
52898 Sep 22 23:23:18.201 DEBG up_ds_listen checked 1 jobs, back to waiting
52899 Sep 22 23:23:18.201 DEBG Flush :1162 extent_limit None deps:[JobId(1161), JobId(1160)] res:true f:69 g:1
52900 Sep 22 23:23:19.989 INFO [lossy] skipping 1160
52901 Sep 22 23:23:19.989 DEBG Flush :1160 extent_limit None deps:[JobId(1159), JobId(1158)] res:true f:68 g:1
52902 Sep 22 23:23:19.989 INFO [lossy] sleeping 1 second
52903 Sep 22 23:23:20.318 DEBG IO Write 1163 has deps [JobId(1162), JobId(1160)]
52904 Sep 22 23:23:20.318 DEBG IO Flush 1164 has deps [JobId(1163), JobId(1162)]
52905 Sep 22 23:23:20.319 DEBG [rc] retire 1160 clears [JobId(1159), JobId(1160)], : downstairs
52906 Sep 22 23:23:20.319 DEBG up_ds_listen was notified
52907 Sep 22 23:23:20.319 DEBG up_ds_listen process 1163
52908 Sep 22 23:23:20.319 DEBG [A] ack job 1163:164, : downstairs
52909 Sep 22 23:23:20.319 DEBG up_ds_listen checked 1 jobs, back to waiting
52910 Sep 22 23:23:20.711 DEBG IO Write 1165 has deps [JobId(1164), JobId(1162)]
52911 Sep 22 23:23:20.711 DEBG up_ds_listen was notified
52912 Sep 22 23:23:20.711 DEBG up_ds_listen process 1165
52913 Sep 22 23:23:20.711 DEBG [A] ack job 1165:166, : downstairs
52914 Sep 22 23:23:20.711 DEBG up_ds_listen checked 1 jobs, back to waiting
52915 Sep 22 23:23:21.041 DEBG IO Write 1166 has deps [JobId(1164), JobId(1162)]
52916 Sep 22 23:23:21.041 DEBG up_ds_listen was notified
52917 Sep 22 23:23:21.041 DEBG up_ds_listen process 1166
52918 Sep 22 23:23:21.041 DEBG [A] ack job 1166:167, : downstairs
52919 Sep 22 23:23:21.041 DEBG up_ds_listen checked 1 jobs, back to waiting
52920 Sep 22 23:23:21.042 DEBG IO Flush 1167 has deps [JobId(1166), JobId(1165), JobId(1164)]
52921 Sep 22 23:23:21.042 INFO [lossy] skipping 1161
52922 Sep 22 23:23:21.042 INFO [lossy] skipping 1162
52923 Sep 22 23:23:21.042 INFO [lossy] skipping 1161
52924 Sep 22 23:23:21.042 INFO [lossy] skipping 1162
52925 Sep 22 23:23:21.048 DEBG Read :1161 deps:[JobId(1160)] res:true
52926 Sep 22 23:23:21.399 DEBG IO Write 1168 has deps [JobId(1167), JobId(1164), JobId(1162)]
52927 Sep 22 23:23:21.399 DEBG up_ds_listen was notified
52928 Sep 22 23:23:21.399 DEBG up_ds_listen process 1168
52929 Sep 22 23:23:21.399 DEBG [A] ack job 1168:169, : downstairs
52930 Sep 22 23:23:21.399 DEBG up_ds_listen checked 1 jobs, back to waiting
52931 Sep 22 23:23:21.730 DEBG IO Write 1169 has deps [JobId(1167), JobId(1164), JobId(1162)]
52932 Sep 22 23:23:21.730 DEBG up_ds_listen was notified
52933 Sep 22 23:23:21.730 DEBG up_ds_listen process 1169
52934 Sep 22 23:23:21.730 DEBG [A] ack job 1169:170, : downstairs
52935 Sep 22 23:23:21.730 DEBG up_ds_listen checked 1 jobs, back to waiting
52936 Sep 22 23:23:21.731 DEBG IO Flush 1170 has deps [JobId(1169), JobId(1168), JobId(1167)]
52937 Sep 22 23:23:22.060 DEBG IO Write 1171 has deps [JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52938 Sep 22 23:23:22.061 DEBG up_ds_listen was notified
52939 Sep 22 23:23:22.061 DEBG up_ds_listen process 1171
52940 Sep 22 23:23:22.061 DEBG [A] ack job 1171:172, : downstairs
52941 Sep 22 23:23:22.061 DEBG up_ds_listen checked 1 jobs, back to waiting
52942 Sep 22 23:23:22.391 DEBG IO Write 1172 has deps [JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52943 Sep 22 23:23:22.391 DEBG up_ds_listen was notified
52944 Sep 22 23:23:22.391 DEBG up_ds_listen process 1172
52945 Sep 22 23:23:22.391 DEBG [A] ack job 1172:173, : downstairs
52946 Sep 22 23:23:22.391 DEBG up_ds_listen checked 1 jobs, back to waiting
52947 Sep 22 23:23:22.392 DEBG IO Flush 1173 has deps [JobId(1172), JobId(1171), JobId(1170)]
52948 Sep 22 23:23:22.721 DEBG IO Write 1174 has deps [JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52949 Sep 22 23:23:22.722 DEBG up_ds_listen was notified
52950 Sep 22 23:23:22.722 DEBG up_ds_listen process 1174
52951 Sep 22 23:23:22.722 DEBG [A] ack job 1174:175, : downstairs
52952 Sep 22 23:23:22.722 DEBG up_ds_listen checked 1 jobs, back to waiting
52953 Sep 22 23:23:23.052 DEBG IO Write 1175 has deps [JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52954 Sep 22 23:23:23.052 DEBG up_ds_listen was notified
52955 Sep 22 23:23:23.052 DEBG up_ds_listen process 1175
52956 Sep 22 23:23:23.052 DEBG [A] ack job 1175:176, : downstairs
52957 Sep 22 23:23:23.052 DEBG up_ds_listen checked 1 jobs, back to waiting
52958 Sep 22 23:23:23.053 DEBG IO Flush 1176 has deps [JobId(1175), JobId(1174), JobId(1173)]
52959 Sep 22 23:23:23.446 DEBG IO Write 1177 has deps [JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52960 Sep 22 23:23:23.446 DEBG up_ds_listen was notified
52961 Sep 22 23:23:23.446 DEBG up_ds_listen process 1177
52962 Sep 22 23:23:23.446 DEBG [A] ack job 1177:178, : downstairs
52963 Sep 22 23:23:23.446 DEBG up_ds_listen checked 1 jobs, back to waiting
52964 Sep 22 23:23:23.587 INFO [lossy] skipping 1163
52965 Sep 22 23:23:23.588 WARN returning error on write!
52966 Sep 22 23:23:23.589 DEBG Write :1163 deps:[JobId(1162), JobId(1160)] res:false
52967 Sep 22 23:23:23.619 DEBG Write :1163 deps:[JobId(1162), JobId(1160)] res:true
52968 Sep 22 23:23:23.624 DEBG Flush :1164 extent_limit None deps:[JobId(1163), JobId(1162)] res:true f:70 g:1
52969 Sep 22 23:23:23.624 DEBG IO Flush 1178 has deps [JobId(1177), JobId(1176)]
52970 Sep 22 23:23:23.953 DEBG IO Write 1179 has deps [JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52971 Sep 22 23:23:23.953 DEBG up_ds_listen was notified
52972 Sep 22 23:23:23.953 DEBG up_ds_listen process 1179
52973 Sep 22 23:23:23.953 DEBG [A] ack job 1179:180, : downstairs
52974 Sep 22 23:23:23.953 DEBG up_ds_listen checked 1 jobs, back to waiting
52975 Sep 22 23:23:24.283 DEBG IO Write 1180 has deps [JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52976 Sep 22 23:23:24.283 DEBG up_ds_listen was notified
52977 Sep 22 23:23:24.283 DEBG up_ds_listen process 1180
52978 Sep 22 23:23:24.283 DEBG [A] ack job 1180:181, : downstairs
52979 Sep 22 23:23:24.283 DEBG up_ds_listen checked 1 jobs, back to waiting
52980 Sep 22 23:23:24.284 DEBG IO Flush 1181 has deps [JobId(1180), JobId(1179), JobId(1178)]
52981 Sep 22 23:23:24.613 DEBG IO Write 1182 has deps [JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52982 Sep 22 23:23:24.613 DEBG up_ds_listen was notified
52983 Sep 22 23:23:24.613 DEBG up_ds_listen process 1182
52984 Sep 22 23:23:24.613 DEBG [A] ack job 1182:183, : downstairs
52985 Sep 22 23:23:24.613 DEBG up_ds_listen checked 1 jobs, back to waiting
52986 Sep 22 23:23:24.943 DEBG IO Write 1183 has deps [JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52987 Sep 22 23:23:24.943 DEBG up_ds_listen was notified
52988 Sep 22 23:23:24.943 DEBG up_ds_listen process 1183
52989 Sep 22 23:23:24.943 DEBG [A] ack job 1183:184, : downstairs
52990 Sep 22 23:23:24.943 DEBG up_ds_listen checked 1 jobs, back to waiting
52991 Sep 22 23:23:24.944 DEBG IO Flush 1184 has deps [JobId(1183), JobId(1182), JobId(1181)]
52992 Sep 22 23:23:25.273 DEBG IO Write 1185 has deps [JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52993 Sep 22 23:23:25.273 DEBG up_ds_listen was notified
52994 Sep 22 23:23:25.273 DEBG up_ds_listen process 1185
52995 Sep 22 23:23:25.273 DEBG [A] ack job 1185:186, : downstairs
52996 Sep 22 23:23:25.274 DEBG up_ds_listen checked 1 jobs, back to waiting
52997 Sep 22 23:23:25.603 DEBG IO Write 1186 has deps [JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
52998 Sep 22 23:23:25.603 DEBG up_ds_listen was notified
52999 Sep 22 23:23:25.604 DEBG up_ds_listen process 1186
53000 Sep 22 23:23:25.604 DEBG [A] ack job 1186:187, : downstairs
53001 Sep 22 23:23:25.604 DEBG up_ds_listen checked 1 jobs, back to waiting
53002 Sep 22 23:23:25.604 DEBG IO Flush 1187 has deps [JobId(1186), JobId(1185), JobId(1184)]
53003 Sep 22 23:23:25.604 INFO [lossy] sleeping 1 second
53004 Sep 22 23:23:25.933 DEBG IO Write 1188 has deps [JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53005 Sep 22 23:23:25.934 DEBG up_ds_listen was notified
53006 Sep 22 23:23:25.934 DEBG up_ds_listen process 1188
53007 Sep 22 23:23:25.934 DEBG [A] ack job 1188:189, : downstairs
53008 Sep 22 23:23:25.934 DEBG up_ds_listen checked 1 jobs, back to waiting
53009 Sep 22 23:23:26.264 DEBG IO Write 1189 has deps [JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53010 Sep 22 23:23:26.264 DEBG up_ds_listen was notified
53011 Sep 22 23:23:26.264 DEBG up_ds_listen process 1189
53012 Sep 22 23:23:26.264 DEBG [A] ack job 1189:190, : downstairs
53013 Sep 22 23:23:26.264 DEBG up_ds_listen checked 1 jobs, back to waiting
53014 Sep 22 23:23:26.265 DEBG IO Flush 1190 has deps [JobId(1189), JobId(1188), JobId(1187)]
53015 Sep 22 23:23:26.657 DEBG IO Write 1191 has deps [JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53016 Sep 22 23:23:26.657 DEBG up_ds_listen was notified
53017 Sep 22 23:23:26.657 DEBG up_ds_listen process 1191
53018 Sep 22 23:23:26.657 DEBG [A] ack job 1191:192, : downstairs
53019 Sep 22 23:23:26.657 DEBG up_ds_listen checked 1 jobs, back to waiting
53020 Sep 22 23:23:26.828 DEBG Write :1165 deps:[JobId(1164), JobId(1162)] res:true
53021 Sep 22 23:23:26.829 DEBG IO Flush 1192 has deps [JobId(1191), JobId(1190)]
53022 Sep 22 23:23:27.158 DEBG IO Write 1193 has deps [JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53023 Sep 22 23:23:27.159 DEBG up_ds_listen was notified
53024 Sep 22 23:23:27.159 DEBG up_ds_listen process 1193
53025 Sep 22 23:23:27.159 DEBG [A] ack job 1193:194, : downstairs
53026 Sep 22 23:23:27.159 DEBG up_ds_listen checked 1 jobs, back to waiting
53027 Sep 22 23:23:27.488 DEBG IO Write 1194 has deps [JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53028 Sep 22 23:23:27.489 DEBG up_ds_listen was notified
53029 Sep 22 23:23:27.489 DEBG up_ds_listen process 1194
53030 Sep 22 23:23:27.489 DEBG [A] ack job 1194:195, : downstairs
53031 Sep 22 23:23:27.489 DEBG up_ds_listen checked 1 jobs, back to waiting
53032 Sep 22 23:23:27.489 DEBG IO Flush 1195 has deps [JobId(1194), JobId(1193), JobId(1192)]
53033 Sep 22 23:23:27.819 DEBG IO Write 1196 has deps [JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53034 Sep 22 23:23:27.819 DEBG up_ds_listen was notified
53035 Sep 22 23:23:27.819 DEBG up_ds_listen process 1196
53036 Sep 22 23:23:27.819 DEBG [A] ack job 1196:197, : downstairs
53037 Sep 22 23:23:27.819 DEBG up_ds_listen checked 1 jobs, back to waiting
53038 Sep 22 23:23:28.150 DEBG IO Write 1197 has deps [JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53039 Sep 22 23:23:28.150 DEBG up_ds_listen was notified
53040 Sep 22 23:23:28.150 DEBG up_ds_listen process 1197
53041 Sep 22 23:23:28.150 DEBG [A] ack job 1197:198, : downstairs
53042 Sep 22 23:23:28.150 DEBG up_ds_listen checked 1 jobs, back to waiting
53043 Sep 22 23:23:28.151 DEBG IO Flush 1198 has deps [JobId(1197), JobId(1196), JobId(1195)]
53044 Sep 22 23:23:28.480 DEBG IO Write 1199 has deps [JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53045 Sep 22 23:23:28.480 DEBG up_ds_listen was notified
53046 Sep 22 23:23:28.480 DEBG up_ds_listen process 1199
53047 Sep 22 23:23:28.481 DEBG [A] ack job 1199:200, : downstairs
53048 Sep 22 23:23:28.481 DEBG up_ds_listen checked 1 jobs, back to waiting
53049 Sep 22 23:23:28.811 DEBG IO Write 1200 has deps [JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53050 Sep 22 23:23:28.811 DEBG up_ds_listen was notified
53051 Sep 22 23:23:28.811 DEBG up_ds_listen process 1200
53052 Sep 22 23:23:28.811 DEBG [A] ack job 1200:201, : downstairs
53053 Sep 22 23:23:28.811 DEBG up_ds_listen checked 1 jobs, back to waiting
53054 Sep 22 23:23:28.812 DEBG IO Flush 1201 has deps [JobId(1200), JobId(1199), JobId(1198)]
53055 Sep 22 23:23:29.141 DEBG IO Write 1202 has deps [JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53056 Sep 22 23:23:29.141 DEBG up_ds_listen was notified
53057 Sep 22 23:23:29.142 DEBG up_ds_listen process 1202
53058 Sep 22 23:23:29.142 DEBG [A] ack job 1202:203, : downstairs
53059 Sep 22 23:23:29.142 DEBG up_ds_listen checked 1 jobs, back to waiting
53060 Sep 22 23:23:29.472 DEBG IO Write 1203 has deps [JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53061 Sep 22 23:23:29.472 DEBG up_ds_listen was notified
53062 Sep 22 23:23:29.472 DEBG up_ds_listen process 1203
53063 Sep 22 23:23:29.472 DEBG [A] ack job 1203:204, : downstairs
53064 Sep 22 23:23:29.472 DEBG up_ds_listen checked 1 jobs, back to waiting
53065 Sep 22 23:23:29.473 DEBG IO Flush 1204 has deps [JobId(1203), JobId(1202), JobId(1201)]
53066 Sep 22 23:23:29.865 DEBG IO Write 1205 has deps [JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53067 Sep 22 23:23:29.865 DEBG up_ds_listen was notified
53068 Sep 22 23:23:29.865 DEBG up_ds_listen process 1205
53069 Sep 22 23:23:29.865 DEBG [A] ack job 1205:206, : downstairs
53070 Sep 22 23:23:29.865 DEBG up_ds_listen checked 1 jobs, back to waiting
53071 Sep 22 23:23:30.005 INFO [lossy] skipping 1166
53072 Sep 22 23:23:30.006 INFO [lossy] skipping 1167
53073 Sep 22 23:23:30.037 DEBG Write :1166 deps:[JobId(1164), JobId(1162)] res:true
53074 Sep 22 23:23:30.038 INFO [lossy] skipping 1167
53075 Sep 22 23:23:30.046 DEBG Flush :1167 extent_limit None deps:[JobId(1166), JobId(1165), JobId(1164)] res:true f:71 g:1
53076 Sep 22 23:23:30.047 DEBG IO Flush 1206 has deps [JobId(1205), JobId(1204)]
53077 Sep 22 23:23:30.376 DEBG IO Write 1207 has deps [JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53078 Sep 22 23:23:30.376 DEBG up_ds_listen was notified
53079 Sep 22 23:23:30.376 DEBG up_ds_listen process 1207
53080 Sep 22 23:23:30.376 DEBG [A] ack job 1207:208, : downstairs
53081 Sep 22 23:23:30.376 DEBG up_ds_listen checked 1 jobs, back to waiting
53082 Sep 22 23:23:30.406 DEBG [2] Read already AckReady 1000, : downstairs
53083 Sep 22 23:23:30.520 DEBG up_ds_listen was notified
53084 Sep 22 23:23:30.520 DEBG up_ds_listen process 1000
53085 Sep 22 23:23:30.523 DEBG [A] ack job 1000:1, : downstairs
53086 Sep 22 23:23:30.706 DEBG IO Write 1208 has deps [JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53087 Sep 22 23:23:30.706 DEBG up_ds_listen was notified
53088 Sep 22 23:23:30.706 DEBG up_ds_listen process 1208
53089 Sep 22 23:23:30.706 DEBG [A] ack job 1208:209, : downstairs
53090 Sep 22 23:23:30.707 DEBG up_ds_listen checked 1 jobs, back to waiting
53091 Sep 22 23:23:30.707 DEBG IO Flush 1209 has deps [JobId(1208), JobId(1207), JobId(1206)]
53092 Sep 22 23:23:31.037 DEBG IO Write 1210 has deps [JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53093 Sep 22 23:23:31.037 DEBG up_ds_listen was notified
53094 Sep 22 23:23:31.037 DEBG up_ds_listen process 1210
53095 Sep 22 23:23:31.037 DEBG [A] ack job 1210:211, : downstairs
53096 Sep 22 23:23:31.037 DEBG up_ds_listen checked 1 jobs, back to waiting
53097 Sep 22 23:23:31.367 DEBG IO Write 1211 has deps [JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53098 Sep 22 23:23:31.367 DEBG up_ds_listen was notified
53099 Sep 22 23:23:31.367 DEBG up_ds_listen process 1211
53100 Sep 22 23:23:31.367 DEBG [A] ack job 1211:212, : downstairs
53101 Sep 22 23:23:31.367 DEBG up_ds_listen checked 1 jobs, back to waiting
53102 Sep 22 23:23:31.368 DEBG IO Flush 1212 has deps [JobId(1211), JobId(1210), JobId(1209)]
53103 Sep 22 23:23:31.697 DEBG IO Write 1213 has deps [JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53104 Sep 22 23:23:31.697 DEBG up_ds_listen was notified
53105 Sep 22 23:23:31.697 DEBG up_ds_listen process 1213
53106 Sep 22 23:23:31.697 DEBG [A] ack job 1213:214, : downstairs
53107 Sep 22 23:23:31.697 DEBG up_ds_listen checked 1 jobs, back to waiting
53108 Sep 22 23:23:32.027 DEBG IO Write 1214 has deps [JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53109 Sep 22 23:23:32.027 DEBG up_ds_listen was notified
53110 Sep 22 23:23:32.027 DEBG up_ds_listen process 1214
53111 Sep 22 23:23:32.027 DEBG [A] ack job 1214:215, : downstairs
53112 Sep 22 23:23:32.027 DEBG up_ds_listen checked 1 jobs, back to waiting
53113 Sep 22 23:23:32.028 DEBG IO Flush 1215 has deps [JobId(1214), JobId(1213), JobId(1212)]
53114 Sep 22 23:23:32.357 DEBG IO Write 1216 has deps [JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53115 Sep 22 23:23:32.357 DEBG up_ds_listen was notified
53116 Sep 22 23:23:32.357 DEBG up_ds_listen process 1216
53117 Sep 22 23:23:32.358 DEBG [A] ack job 1216:217, : downstairs
53118 Sep 22 23:23:32.358 DEBG up_ds_listen checked 1 jobs, back to waiting
53119 Sep 22 23:23:32.687 DEBG IO Write 1217 has deps [JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53120 Sep 22 23:23:32.687 DEBG up_ds_listen was notified
53121 Sep 22 23:23:32.687 DEBG up_ds_listen process 1217
53122 Sep 22 23:23:32.687 DEBG [A] ack job 1217:218, : downstairs
53123 Sep 22 23:23:32.687 DEBG up_ds_listen checked 1 jobs, back to waiting
53124 Sep 22 23:23:32.688 DEBG IO Flush 1218 has deps [JobId(1217), JobId(1216), JobId(1215)]
53125 Sep 22 23:23:33.016 DEBG up_ds_listen process 1001
53126 Sep 22 23:23:33.016 DEBG [A] ack job 1001:2, : downstairs
53127 Sep 22 23:23:33.016 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
53128 Sep 22 23:23:33.017 DEBG up_ds_listen checked 2 jobs, back to waiting
53129 Sep 22 23:23:33.017 DEBG up_ds_listen was notified
53130 Sep 22 23:23:33.017 DEBG up_ds_listen checked 0 jobs, back to waiting
53131 Sep 22 23:23:33.017 DEBG IO Write 1219 has deps [JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53132 Sep 22 23:23:33.017 DEBG up_ds_listen was notified
53133 Sep 22 23:23:33.017 DEBG up_ds_listen process 1219
53134 Sep 22 23:23:33.017 DEBG [A] ack job 1219:220, : downstairs
53135 Sep 22 23:23:33.017 DEBG up_ds_listen checked 1 jobs, back to waiting
53136 test test::integration_test_volume_replace_downstairs_then_takeover ... ok
53137 Sep 22 23:23:33.415 DEBG IO Write 1220 has deps [JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53138 Sep 22 23:23:33.416 DEBG up_ds_listen was notified
53139 Sep 22 23:23:33.416 DEBG up_ds_listen process 1220
53140 Sep 22 23:23:33.416 DEBG [A] ack job 1220:221, : downstairs
53141 Sep 22 23:23:33.416 DEBG up_ds_listen checked 1 jobs, back to waiting
53142 Sep 22 23:23:33.558 DEBG IO Flush 1221 has deps [JobId(1220), JobId(1219), JobId(1218)]
53143 Sep 22 23:23:33.558 INFO [lossy] skipping 1168
53144 Sep 22 23:23:33.560 WARN returning error on write!
53145 Sep 22 23:23:33.560 DEBG Write :1168 deps:[JobId(1167), JobId(1164), JobId(1162)] res:false
53146 Sep 22 23:23:33.561 INFO [lossy] skipping 1168
53147 Sep 22 23:23:33.591 DEBG Write :1168 deps:[JobId(1167), JobId(1164), JobId(1162)] res:true
53148 Sep 22 23:23:33.921 DEBG IO Write 1222 has deps [JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53149 Sep 22 23:23:33.921 DEBG up_ds_listen was notified
53150 Sep 22 23:23:33.921 DEBG up_ds_listen process 1222
53151 Sep 22 23:23:33.921 DEBG [A] ack job 1222:223, : downstairs
53152 Sep 22 23:23:33.921 DEBG up_ds_listen checked 1 jobs, back to waiting
53153 Sep 22 23:23:34.251 DEBG IO Write 1223 has deps [JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53154 Sep 22 23:23:34.251 DEBG up_ds_listen was notified
53155 Sep 22 23:23:34.251 DEBG up_ds_listen process 1223
53156 Sep 22 23:23:34.251 DEBG [A] ack job 1223:224, : downstairs
53157 Sep 22 23:23:34.251 DEBG up_ds_listen checked 1 jobs, back to waiting
53158 Sep 22 23:23:34.252 DEBG IO Flush 1224 has deps [JobId(1223), JobId(1222), JobId(1221)]
53159 Sep 22 23:23:34.581 DEBG IO Write 1225 has deps [JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53160 Sep 22 23:23:34.582 DEBG up_ds_listen was notified
53161 Sep 22 23:23:34.582 DEBG up_ds_listen process 1225
53162 Sep 22 23:23:34.582 DEBG [A] ack job 1225:226, : downstairs
53163 Sep 22 23:23:34.582 DEBG up_ds_listen checked 1 jobs, back to waiting
53164 Sep 22 23:23:34.912 DEBG IO Write 1226 has deps [JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53165 Sep 22 23:23:34.912 DEBG up_ds_listen was notified
53166 Sep 22 23:23:34.912 DEBG up_ds_listen process 1226
53167 Sep 22 23:23:34.912 DEBG [A] ack job 1226:227, : downstairs
53168 Sep 22 23:23:34.912 DEBG up_ds_listen checked 1 jobs, back to waiting
53169 Sep 22 23:23:34.913 DEBG IO Flush 1227 has deps [JobId(1226), JobId(1225), JobId(1224)]
53170 Sep 22 23:23:35.242 DEBG IO Write 1228 has deps [JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53171 Sep 22 23:23:35.242 DEBG up_ds_listen was notified
53172 Sep 22 23:23:35.242 DEBG up_ds_listen process 1228
53173 Sep 22 23:23:35.242 DEBG [A] ack job 1228:229, : downstairs
53174 Sep 22 23:23:35.242 DEBG up_ds_listen checked 1 jobs, back to waiting
53175 Sep 22 23:23:35.572 DEBG IO Write 1229 has deps [JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53176 Sep 22 23:23:35.572 DEBG up_ds_listen was notified
53177 Sep 22 23:23:35.572 DEBG up_ds_listen process 1229
53178 Sep 22 23:23:35.572 DEBG [A] ack job 1229:230, : downstairs
53179 Sep 22 23:23:35.572 DEBG up_ds_listen checked 1 jobs, back to waiting
53180 Sep 22 23:23:35.573 DEBG IO Flush 1230 has deps [JobId(1229), JobId(1228), JobId(1227)]
53181 Sep 22 23:23:35.902 DEBG IO Write 1231 has deps [JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53182 Sep 22 23:23:35.902 DEBG up_ds_listen was notified
53183 Sep 22 23:23:35.902 DEBG up_ds_listen process 1231
53184 Sep 22 23:23:35.902 DEBG [A] ack job 1231:232, : downstairs
53185 Sep 22 23:23:35.902 DEBG up_ds_listen checked 1 jobs, back to waiting
53186 Sep 22 23:23:35.903 INFO [lossy] sleeping 1 second
53187 Sep 22 23:23:36.232 DEBG IO Write 1232 has deps [JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
53188 Sep 22 23:23:36.233 DEBG up_ds_listen was notified
53189 Sep 22 23:23:36.233 DEBG up_ds_listen process 1232
53190 Sep 22 23:23:36.233 DEBG [A] ack job 1232:233, : downstairs
53191 Sep 22 23:23:36.233 DEBG up_ds_listen checked 1 jobs, back to waiting
53192 Sep 22 23:23:36.233 DEBG IO Flush 1233 has deps [JobId(1232), JobId(1231), JobId(1230)]
53193 Sep 22 23:23:36.625 DEBG IO Write 1234 has deps [JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)]
53194 Sep 22 23:23:36.626 DEBG up_ds_listen was notified
53195 Sep 22 23:23:36.626 DEBG up_ds_listen process 1234
53196 Sep 22 23:23:36.626 DEBG [A] ack job 1234:235, : downstairs
53197 Sep 22 23:23:36.626 DEBG up_ds_listen checked 1 jobs, back to waiting
53198 Sep 22 23:23:36.766 DEBG IO Read 1235 has deps [JobId(1233)]
53199 Sep 22 23:23:36.766 DEBG IO Flush 1236 has deps [JobId(1235), JobId(1234), JobId(1233)]
53200 Sep 22 23:23:37.002 DEBG Write :1169 deps:[JobId(1167), JobId(1164), JobId(1162)] res:true
53201 Sep 22 23:23:37.011 DEBG Flush :1170 extent_limit None deps:[JobId(1169), JobId(1168), JobId(1167)] res:true f:72 g:1
53202 Sep 22 23:23:37.011 INFO [lossy] skipping 1171
53203 Sep 22 23:23:37.011 INFO [lossy] skipping 1171
53204 Sep 22 23:23:37.041 DEBG Write :1171 deps:[JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53205 Sep 22 23:23:37.247 INFO [lossy] skipping 1172
53206 Sep 22 23:23:37.278 DEBG Write :1172 deps:[JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53207 Sep 22 23:23:37.279 INFO [lossy] skipping 1173
53208 Sep 22 23:23:37.287 DEBG Flush :1173 extent_limit None deps:[JobId(1172), JobId(1171), JobId(1170)] res:true f:73 g:1
53209 Sep 22 23:23:37.491 INFO [lossy] sleeping 1 second
53210 Sep 22 23:23:37.556 ERRO [2] job id 1163 saw error GenericError("test error")
53211 Sep 22 23:23:37.556 ERRO [2] job id 1168 saw error GenericError("test error")
53212 Sep 22 23:23:38.549 DEBG Write :1174 deps:[JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53213 Sep 22 23:23:38.580 DEBG Write :1175 deps:[JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53214 Sep 22 23:23:38.581 WARN returning error on flush!
53215 Sep 22 23:23:38.581 DEBG Flush :1176 extent_limit None deps:[JobId(1175), JobId(1174), JobId(1173)] res:false f:74 g:1
53216 Sep 22 23:23:38.581 INFO [lossy] skipping 1179
53217 Sep 22 23:23:38.581 INFO [lossy] skipping 1180
53218 Sep 22 23:23:38.581 INFO [lossy] skipping 1181
53219 Sep 22 23:23:38.581 INFO [lossy] skipping 1183
53220 Sep 22 23:23:38.581 INFO [lossy] skipping 1187
53221 Sep 22 23:23:38.581 INFO [lossy] skipping 1191
53222 Sep 22 23:23:38.581 WARN returning error on flush!
53223 Sep 22 23:23:38.581 DEBG Flush :1176 extent_limit None deps:[JobId(1175), JobId(1174), JobId(1173)] res:false f:74 g:1
53224 Sep 22 23:23:38.581 INFO [lossy] skipping 1179
53225 Sep 22 23:23:38.581 INFO [lossy] skipping 1181
53226 Sep 22 23:23:38.589 DEBG Flush :1176 extent_limit None deps:[JobId(1175), JobId(1174), JobId(1173)] res:true f:74 g:1
53227 Sep 22 23:23:38.590 WARN returning error on write!
53228 Sep 22 23:23:38.590 DEBG Write :1177 deps:[JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53229 Sep 22 23:23:38.590 WARN 1178 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53230 Sep 22 23:23:38.590 INFO [lossy] skipping 1180
53231 Sep 22 23:23:38.590 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53232 Sep 22 23:23:38.590 INFO [lossy] skipping 1183
53233 Sep 22 23:23:38.590 INFO [lossy] skipping 1184
53234 Sep 22 23:23:38.590 INFO [lossy] skipping 1185
53235 Sep 22 23:23:38.590 WARN 1186 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53236 Sep 22 23:23:38.590 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53237 Sep 22 23:23:38.590 INFO [lossy] skipping 1189
53238 Sep 22 23:23:38.590 INFO [lossy] skipping 1190
53239 Sep 22 23:23:38.590 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53240 Sep 22 23:23:38.590 INFO [lossy] skipping 1192
53241 Sep 22 23:23:38.590 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53242 Sep 22 23:23:38.590 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53243 Sep 22 23:23:38.591 WARN returning error on write!
53244 Sep 22 23:23:38.591 DEBG Write :1177 deps:[JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53245 Sep 22 23:23:38.591 WARN 1180 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53246 Sep 22 23:23:38.591 WARN 1183 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53247 Sep 22 23:23:38.591 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53248 Sep 22 23:23:38.591 WARN 1189 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53249 Sep 22 23:23:38.591 INFO [lossy] skipping 1190
53250 Sep 22 23:23:38.591 INFO [lossy] skipping 1192
53251 Sep 22 23:23:38.621 DEBG Write :1177 deps:[JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53252 Sep 22 23:23:38.626 DEBG Flush :1178 extent_limit None deps:[JobId(1177), JobId(1176)] res:true f:75 g:1
53253 Sep 22 23:23:38.656 DEBG Write :1179 deps:[JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53254 Sep 22 23:23:38.658 WARN returning error on write!
53255 Sep 22 23:23:38.658 DEBG Write :1180 deps:[JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53256 Sep 22 23:23:38.658 WARN 1181 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53257 Sep 22 23:23:38.658 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53258 Sep 22 23:23:38.658 INFO [lossy] skipping 1183
53259 Sep 22 23:23:38.658 INFO [lossy] skipping 1184
53260 Sep 22 23:23:38.658 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53261 Sep 22 23:23:38.658 WARN 1186 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53262 Sep 22 23:23:38.658 INFO [lossy] skipping 1188
53263 Sep 22 23:23:38.658 WARN 1189 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53264 Sep 22 23:23:38.658 INFO [lossy] skipping 1191
53265 Sep 22 23:23:38.658 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53266 Sep 22 23:23:38.658 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53267 Sep 22 23:23:38.688 DEBG Write :1180 deps:[JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53268 Sep 22 23:23:38.689 WARN 1183 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53269 Sep 22 23:23:38.689 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53270 Sep 22 23:23:38.690 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53271 Sep 22 23:23:38.697 DEBG Flush :1181 extent_limit None deps:[JobId(1180), JobId(1179), JobId(1178)] res:true f:76 g:1
53272 Sep 22 23:23:38.728 DEBG Write :1182 deps:[JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53273 Sep 22 23:23:38.759 DEBG Write :1183 deps:[JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53274 Sep 22 23:23:38.768 DEBG Flush :1184 extent_limit None deps:[JobId(1183), JobId(1182), JobId(1181)] res:true f:77 g:1
53275 Sep 22 23:23:38.798 DEBG Write :1185 deps:[JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53276 Sep 22 23:23:38.830 DEBG Write :1186 deps:[JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53277 Sep 22 23:23:38.839 DEBG Flush :1187 extent_limit None deps:[JobId(1186), JobId(1185), JobId(1184)] res:true f:78 g:1
53278 Sep 22 23:23:38.839 INFO [lossy] skipping 1188
53279 Sep 22 23:23:38.869 DEBG Write :1189 deps:[JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53280 Sep 22 23:23:38.870 WARN 1190 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53281 Sep 22 23:23:38.870 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53282 Sep 22 23:23:38.870 INFO [lossy] skipping 1192
53283 Sep 22 23:23:38.870 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53284 Sep 22 23:23:38.870 INFO [lossy] skipping 1194
53285 Sep 22 23:23:38.871 WARN returning error on write!
53286 Sep 22 23:23:38.871 DEBG Write :1188 deps:[JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53287 Sep 22 23:23:38.872 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53288 Sep 22 23:23:38.902 DEBG Write :1188 deps:[JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53289 Sep 22 23:23:38.903 INFO [lossy] sleeping 1 second
53290 Sep 22 23:23:39.933 INFO [lossy] skipping 1190
53291 Sep 22 23:23:39.933 INFO [lossy] skipping 1197
53292 Sep 22 23:23:39.933 INFO [lossy] skipping 1204
53293 Sep 22 23:23:39.934 INFO [lossy] skipping 1213
53294 Sep 22 23:23:39.934 INFO [lossy] skipping 1214
53295 Sep 22 23:23:39.934 INFO [lossy] skipping 1217
53296 Sep 22 23:23:39.934 INFO [lossy] skipping 1218
53297 Sep 22 23:23:39.942 DEBG Flush :1190 extent_limit None deps:[JobId(1189), JobId(1188), JobId(1187)] res:true f:79 g:1
53298 Sep 22 23:23:39.942 INFO [lossy] skipping 1214
53299 Sep 22 23:23:39.942 INFO [lossy] skipping 1218
53300 Sep 22 23:23:39.973 DEBG Write :1191 deps:[JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53301 Sep 22 23:23:39.974 INFO [lossy] skipping 1192
53302 Sep 22 23:23:39.974 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53303 Sep 22 23:23:39.974 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53304 Sep 22 23:23:39.974 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53305 Sep 22 23:23:39.974 INFO [lossy] skipping 1199
53306 Sep 22 23:23:39.974 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53307 Sep 22 23:23:39.974 INFO [lossy] skipping 1202
53308 Sep 22 23:23:39.974 INFO [lossy] skipping 1203
53309 Sep 22 23:23:39.974 INFO [lossy] skipping 1204
53310 Sep 22 23:23:39.974 INFO [lossy] skipping 1205
53311 Sep 22 23:23:39.974 WARN 1207 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53312 Sep 22 23:23:39.974 INFO [lossy] skipping 1208
53313 Sep 22 23:23:39.974 WARN 1210 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53314 Sep 22 23:23:39.974 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53315 Sep 22 23:23:39.974 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53316 Sep 22 23:23:39.974 INFO [lossy] skipping 1218
53317 Sep 22 23:23:39.974 WARN returning error on flush!
53318 Sep 22 23:23:39.974 DEBG Flush :1192 extent_limit None deps:[JobId(1191), JobId(1190)] res:false f:80 g:1
53319 Sep 22 23:23:39.974 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53320 Sep 22 23:23:39.974 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53321 Sep 22 23:23:39.974 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53322 Sep 22 23:23:39.974 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53323 Sep 22 23:23:39.974 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53324 Sep 22 23:23:39.978 DEBG Flush :1192 extent_limit None deps:[JobId(1191), JobId(1190)] res:true f:80 g:1
53325 Sep 22 23:23:39.978 INFO [lossy] sleeping 1 second
53326 Sep 22 23:23:40.718 ERRO [2] job id 1176 saw error GenericError("test error")
53327 Sep 22 23:23:40.718 ERRO [2] job id 1176 saw error GenericError("test error")
53328 Sep 22 23:23:40.718 ERRO [2] job id 1177 saw error GenericError("test error")
53329 Sep 22 23:23:40.718 ERRO [2] job id 1177 saw error GenericError("test error")
53330 Sep 22 23:23:40.718 ERRO [2] job id 1180 saw error GenericError("test error")
53331 Sep 22 23:23:40.718 ERRO [2] job id 1188 saw error GenericError("test error")
53332 Sep 22 23:23:40.718 ERRO [2] job id 1192 saw error GenericError("test error")
53333 Sep 22 23:23:41.010 DEBG Write :1193 deps:[JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53334 Sep 22 23:23:41.011 INFO [lossy] skipping 1194
53335 Sep 22 23:23:41.011 WARN 1195 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53336 Sep 22 23:23:41.011 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53337 Sep 22 23:23:41.011 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53338 Sep 22 23:23:41.011 INFO [lossy] skipping 1198
53339 Sep 22 23:23:41.011 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53340 Sep 22 23:23:41.011 INFO [lossy] skipping 1200
53341 Sep 22 23:23:41.011 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53342 Sep 22 23:23:41.011 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53343 Sep 22 23:23:41.011 INFO [lossy] skipping 1204
53344 Sep 22 23:23:41.012 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53345 Sep 22 23:23:41.012 WARN 1207 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53346 Sep 22 23:23:41.012 INFO [lossy] skipping 1208
53347 Sep 22 23:23:41.012 INFO [lossy] skipping 1209
53348 Sep 22 23:23:41.012 INFO [lossy] skipping 1210
53349 Sep 22 23:23:41.012 INFO [lossy] skipping 1211
53350 Sep 22 23:23:41.012 INFO [lossy] skipping 1212
53351 Sep 22 23:23:41.012 WARN 1213 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53352 Sep 22 23:23:41.012 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53353 Sep 22 23:23:41.012 INFO [lossy] skipping 1215
53354 Sep 22 23:23:41.012 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53355 Sep 22 23:23:41.012 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53356 Sep 22 23:23:41.012 INFO [lossy] skipping 1220
53357 Sep 22 23:23:41.012 INFO [lossy] skipping 1226
53358 Sep 22 23:23:41.012 INFO [lossy] skipping 1232
53359 Sep 22 23:23:41.012 INFO [lossy] skipping 1236
53360 Sep 22 23:23:41.042 DEBG Write :1194 deps:[JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53361 Sep 22 23:23:41.043 INFO [lossy] skipping 1200
53362 Sep 22 23:23:41.043 INFO [lossy] skipping 1208
53363 Sep 22 23:23:41.043 INFO [lossy] skipping 1209
53364 Sep 22 23:23:41.043 WARN 1210 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53365 Sep 22 23:23:41.043 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53366 Sep 22 23:23:41.043 INFO [lossy] skipping 1220
53367 Sep 22 23:23:41.043 INFO [lossy] skipping 1226
53368 Sep 22 23:23:41.043 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53369 Sep 22 23:23:41.043 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53370 Sep 22 23:23:41.051 DEBG Flush :1195 extent_limit None deps:[JobId(1194), JobId(1193), JobId(1192)] res:true f:81 g:1
53371 Sep 22 23:23:41.082 DEBG Write :1196 deps:[JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53372 Sep 22 23:23:41.083 INFO [lossy] skipping 1197
53373 Sep 22 23:23:41.083 INFO [lossy] skipping 1198
53374 Sep 22 23:23:41.083 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53375 Sep 22 23:23:41.083 INFO [lossy] skipping 1200
53376 Sep 22 23:23:41.083 INFO [lossy] skipping 1202
53377 Sep 22 23:23:41.083 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53378 Sep 22 23:23:41.083 INFO [lossy] skipping 1204
53379 Sep 22 23:23:41.083 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53380 Sep 22 23:23:41.083 WARN 1207 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53381 Sep 22 23:23:41.083 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53382 Sep 22 23:23:41.083 WARN 1210 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53383 Sep 22 23:23:41.083 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53384 Sep 22 23:23:41.083 INFO [lossy] skipping 1212
53385 Sep 22 23:23:41.083 WARN 1213 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53386 Sep 22 23:23:41.083 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53387 Sep 22 23:23:41.083 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53388 Sep 22 23:23:41.083 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53389 Sep 22 23:23:41.083 INFO [lossy] skipping 1218
53390 Sep 22 23:23:41.083 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53391 Sep 22 23:23:41.083 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53392 Sep 22 23:23:41.083 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53393 Sep 22 23:23:41.083 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53394 Sep 22 23:23:41.083 INFO [lossy] skipping 1224
53395 Sep 22 23:23:41.083 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 10 deps, role: work
53396 Sep 22 23:23:41.083 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 10 deps, role: work
53397 Sep 22 23:23:41.083 INFO [lossy] skipping 1227
53398 Sep 22 23:23:41.083 INFO [lossy] skipping 1228
53399 Sep 22 23:23:41.083 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 11 deps, role: work
53400 Sep 22 23:23:41.083 INFO [lossy] skipping 1230
53401 Sep 22 23:23:41.083 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 12 deps, role: work
53402 Sep 22 23:23:41.083 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 12 deps, role: work
53403 Sep 22 23:23:41.083 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 13 deps, role: work
53404 Sep 22 23:23:41.083 INFO [lossy] skipping 1235
53405 Sep 22 23:23:41.083 INFO [lossy] skipping 1236
53406 Sep 22 23:23:41.084 WARN returning error on write!
53407 Sep 22 23:23:41.084 DEBG Write :1197 deps:[JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53408 Sep 22 23:23:41.084 INFO [lossy] skipping 1198
53409 Sep 22 23:23:41.084 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53410 Sep 22 23:23:41.084 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53411 Sep 22 23:23:41.084 INFO [lossy] skipping 1204
53412 Sep 22 23:23:41.085 INFO [lossy] skipping 1218
53413 Sep 22 23:23:41.085 INFO [lossy] skipping 1224
53414 Sep 22 23:23:41.085 INFO [lossy] skipping 1227
53415 Sep 22 23:23:41.085 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 11 deps, role: work
53416 Sep 22 23:23:41.115 DEBG Write :1197 deps:[JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53417 Sep 22 23:23:41.124 DEBG Flush :1198 extent_limit None deps:[JobId(1197), JobId(1196), JobId(1195)] res:true f:82 g:1
53418 Sep 22 23:23:41.124 INFO [lossy] skipping 1218
53419 Sep 22 23:23:41.124 INFO [lossy] skipping 1227
53420 Sep 22 23:23:41.124 INFO [lossy] skipping 1227
53421 Sep 22 23:23:41.125 WARN returning error on write!
53422 Sep 22 23:23:41.125 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53423 Sep 22 23:23:41.127 WARN returning error on write!
53424 Sep 22 23:23:41.127 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53425 Sep 22 23:23:41.127 INFO [lossy] skipping 1201
53426 Sep 22 23:23:41.127 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53427 Sep 22 23:23:41.127 INFO [lossy] skipping 1203
53428 Sep 22 23:23:41.127 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53429 Sep 22 23:23:41.127 WARN 1207 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53430 Sep 22 23:23:41.127 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53431 Sep 22 23:23:41.127 INFO [lossy] skipping 1210
53432 Sep 22 23:23:41.127 INFO [lossy] skipping 1211
53433 Sep 22 23:23:41.127 INFO [lossy] skipping 1212
53434 Sep 22 23:23:41.127 INFO [lossy] skipping 1213
53435 Sep 22 23:23:41.127 INFO [lossy] skipping 1214
53436 Sep 22 23:23:41.127 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53437 Sep 22 23:23:41.127 INFO [lossy] skipping 1217
53438 Sep 22 23:23:41.127 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53439 Sep 22 23:23:41.127 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53440 Sep 22 23:23:41.127 INFO [lossy] skipping 1222
53441 Sep 22 23:23:41.127 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53442 Sep 22 23:23:41.127 INFO [lossy] skipping 1224
53443 Sep 22 23:23:41.127 INFO [lossy] skipping 1225
53444 Sep 22 23:23:41.127 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53445 Sep 22 23:23:41.127 INFO [lossy] skipping 1228
53446 Sep 22 23:23:41.127 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 10 deps, role: work
53447 Sep 22 23:23:41.127 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 11 deps, role: work
53448 Sep 22 23:23:41.127 INFO [lossy] skipping 1232
53449 Sep 22 23:23:41.127 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 12 deps, role: work
53450 Sep 22 23:23:41.127 INFO [lossy] skipping 1199
53451 Sep 22 23:23:41.128 WARN returning error on write!
53452 Sep 22 23:23:41.128 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53453 Sep 22 23:23:41.128 INFO [lossy] skipping 1201
53454 Sep 22 23:23:41.128 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53455 Sep 22 23:23:41.128 WARN 1210 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53456 Sep 22 23:23:41.128 INFO [lossy] skipping 1211
53457 Sep 22 23:23:41.128 WARN 1213 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53458 Sep 22 23:23:41.128 INFO [lossy] skipping 1214
53459 Sep 22 23:23:41.128 INFO [lossy] skipping 1217
53460 Sep 22 23:23:41.128 INFO [lossy] skipping 1222
53461 Sep 22 23:23:41.128 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53462 Sep 22 23:23:41.128 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 10 deps, role: work
53463 Sep 22 23:23:41.128 INFO [lossy] skipping 1232
53464 Sep 22 23:23:41.159 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53465 Sep 22 23:23:41.191 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53466 Sep 22 23:23:41.192 INFO [lossy] skipping 1201
53467 Sep 22 23:23:41.192 INFO [lossy] skipping 1211
53468 Sep 22 23:23:41.192 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53469 Sep 22 23:23:41.192 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53470 Sep 22 23:23:41.192 INFO [lossy] skipping 1222
53471 Sep 22 23:23:41.192 INFO [lossy] skipping 1232
53472 Sep 22 23:23:41.192 INFO [lossy] skipping 1201
53473 Sep 22 23:23:41.192 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53474 Sep 22 23:23:41.192 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53475 Sep 22 23:23:41.192 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 11 deps, role: work
53476 Sep 22 23:23:41.201 DEBG Flush :1201 extent_limit None deps:[JobId(1200), JobId(1199), JobId(1198)] res:true f:83 g:1
53477 Sep 22 23:23:41.201 INFO [lossy] skipping 1202
53478 Sep 22 23:23:41.231 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53479 Sep 22 23:23:41.233 INFO [lossy] skipping 1204
53480 Sep 22 23:23:41.233 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53481 Sep 22 23:23:41.233 INFO [lossy] skipping 1206
53482 Sep 22 23:23:41.233 WARN 1207 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53483 Sep 22 23:23:41.233 INFO [lossy] skipping 1208
53484 Sep 22 23:23:41.233 INFO [lossy] skipping 1209
53485 Sep 22 23:23:41.233 WARN 1210 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53486 Sep 22 23:23:41.233 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53487 Sep 22 23:23:41.233 INFO [lossy] skipping 1212
53488 Sep 22 23:23:41.233 WARN 1213 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53489 Sep 22 23:23:41.233 INFO [lossy] skipping 1214
53490 Sep 22 23:23:41.233 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53491 Sep 22 23:23:41.233 INFO [lossy] skipping 1217
53492 Sep 22 23:23:41.233 INFO [lossy] skipping 1219
53493 Sep 22 23:23:41.233 INFO [lossy] skipping 1220
53494 Sep 22 23:23:41.233 INFO [lossy] skipping 1221
53495 Sep 22 23:23:41.233 INFO [lossy] skipping 1222
53496 Sep 22 23:23:41.233 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53497 Sep 22 23:23:41.233 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53498 Sep 22 23:23:41.233 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53499 Sep 22 23:23:41.233 INFO [lossy] skipping 1227
53500 Sep 22 23:23:41.233 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53501 Sep 22 23:23:41.233 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53502 Sep 22 23:23:41.233 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 10 deps, role: work
53503 Sep 22 23:23:41.233 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 10 deps, role: work
53504 Sep 22 23:23:41.233 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 11 deps, role: work
53505 Sep 22 23:23:41.233 INFO [lossy] skipping 1236
53506 Sep 22 23:23:41.264 DEBG Write :1202 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53507 Sep 22 23:23:41.265 INFO [lossy] skipping 1204
53508 Sep 22 23:23:41.265 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53509 Sep 22 23:23:41.265 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53510 Sep 22 23:23:41.265 INFO [lossy] skipping 1217
53511 Sep 22 23:23:41.265 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53512 Sep 22 23:23:41.265 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53513 Sep 22 23:23:41.266 INFO [lossy] skipping 1221
53514 Sep 22 23:23:41.266 INFO [lossy] skipping 1222
53515 Sep 22 23:23:41.266 WARN returning error on flush!
53516 Sep 22 23:23:41.266 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:false f:84 g:1
53517 Sep 22 23:23:41.266 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53518 Sep 22 23:23:41.266 INFO [lossy] skipping 1222
53519 Sep 22 23:23:41.266 WARN returning error on flush!
53520 Sep 22 23:23:41.266 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:false f:84 g:1
53521 Sep 22 23:23:41.266 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53522 Sep 22 23:23:41.266 WARN returning error on flush!
53523 Sep 22 23:23:41.266 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:false f:84 g:1
53524 Sep 22 23:23:41.266 WARN returning error on flush!
53525 Sep 22 23:23:41.266 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:false f:84 g:1
53526 Sep 22 23:23:41.274 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:true f:84 g:1
53527 Sep 22 23:23:41.305 DEBG Write :1205 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53528 Sep 22 23:23:41.310 DEBG Flush :1206 extent_limit None deps:[JobId(1205), JobId(1204)] res:true f:85 g:1
53529 Sep 22 23:23:41.312 WARN returning error on write!
53530 Sep 22 23:23:41.312 DEBG Write :1207 deps:[JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53531 Sep 22 23:23:41.343 DEBG Write :1208 deps:[JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53532 Sep 22 23:23:41.344 WARN 1209 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53533 Sep 22 23:23:41.344 WARN 1210 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53534 Sep 22 23:23:41.344 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53535 Sep 22 23:23:41.344 WARN 1213 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53536 Sep 22 23:23:41.344 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53537 Sep 22 23:23:41.344 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53538 Sep 22 23:23:41.344 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53539 Sep 22 23:23:41.344 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53540 Sep 22 23:23:41.344 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53541 Sep 22 23:23:41.344 INFO [lossy] skipping 1221
53542 Sep 22 23:23:41.344 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53543 Sep 22 23:23:41.344 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53544 Sep 22 23:23:41.345 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53545 Sep 22 23:23:41.345 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53546 Sep 22 23:23:41.345 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53547 Sep 22 23:23:41.345 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53548 Sep 22 23:23:41.345 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53549 Sep 22 23:23:41.345 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53550 Sep 22 23:23:41.345 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
53551 Sep 22 23:23:41.345 INFO [lossy] skipping 1236
53552 Sep 22 23:23:41.375 DEBG Write :1207 deps:[JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53553 Sep 22 23:23:41.377 INFO [lossy] skipping 1236
53554 Sep 22 23:23:41.377 INFO [lossy] sleeping 1 second
53555 Sep 22 23:23:41.378 ERRO [2] job id 1197 saw error GenericError("test error")
53556 Sep 22 23:23:41.378 ERRO [2] job id 1199 saw error GenericError("test error")
53557 Sep 22 23:23:41.378 ERRO [2] job id 1200 saw error GenericError("test error")
53558 Sep 22 23:23:41.378 ERRO [2] job id 1200 saw error GenericError("test error")
53559 Sep 22 23:23:41.378 ERRO [2] job id 1204 saw error GenericError("test error")
53560 Sep 22 23:23:41.378 ERRO [2] job id 1204 saw error GenericError("test error")
53561 Sep 22 23:23:41.378 ERRO [2] job id 1204 saw error GenericError("test error")
53562 Sep 22 23:23:41.378 ERRO [2] job id 1204 saw error GenericError("test error")
53563 Sep 22 23:23:41.378 ERRO [2] job id 1207 saw error GenericError("test error")
53564 Sep 22 23:23:42.387 DEBG Flush :1209 extent_limit None deps:[JobId(1208), JobId(1207), JobId(1206)] res:true f:86 g:1
53565 Sep 22 23:23:42.388 WARN returning error on write!
53566 Sep 22 23:23:42.388 DEBG Write :1210 deps:[JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53567 Sep 22 23:23:42.419 DEBG Write :1211 deps:[JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53568 Sep 22 23:23:42.420 WARN 1212 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53569 Sep 22 23:23:42.420 WARN 1213 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53570 Sep 22 23:23:42.420 INFO [lossy] skipping 1214
53571 Sep 22 23:23:42.420 INFO [lossy] skipping 1215
53572 Sep 22 23:23:42.420 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53573 Sep 22 23:23:42.420 INFO [lossy] skipping 1217
53574 Sep 22 23:23:42.420 INFO [lossy] skipping 1219
53575 Sep 22 23:23:42.420 INFO [lossy] skipping 1220
53576 Sep 22 23:23:42.420 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53577 Sep 22 23:23:42.420 INFO [lossy] skipping 1223
53578 Sep 22 23:23:42.420 INFO [lossy] skipping 1225
53579 Sep 22 23:23:42.420 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53580 Sep 22 23:23:42.420 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53581 Sep 22 23:23:42.421 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53582 Sep 22 23:23:42.421 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53583 Sep 22 23:23:42.421 INFO [lossy] skipping 1232
53584 Sep 22 23:23:42.421 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
53585 Sep 22 23:23:42.451 DEBG Write :1210 deps:[JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53586 Sep 22 23:23:42.452 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53587 Sep 22 23:23:42.452 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53588 Sep 22 23:23:42.452 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53589 Sep 22 23:23:42.452 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53590 Sep 22 23:23:42.452 INFO [lossy] skipping 1223
53591 Sep 22 23:23:42.452 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53592 Sep 22 23:23:42.452 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53593 Sep 22 23:23:42.452 INFO [lossy] skipping 1223
53594 Sep 22 23:23:42.453 INFO [lossy] skipping 1223
53595 Sep 22 23:23:42.453 INFO [lossy] skipping 1223
53596 Sep 22 23:23:42.453 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53597 Sep 22 23:23:42.453 INFO [lossy] skipping 1212
53598 Sep 22 23:23:42.453 INFO [lossy] skipping 1215
53599 Sep 22 23:23:42.453 INFO [lossy] skipping 1218
53600 Sep 22 23:23:42.453 INFO [lossy] skipping 1225
53601 Sep 22 23:23:42.453 INFO [lossy] skipping 1227
53602 Sep 22 23:23:42.453 INFO [lossy] skipping 1232
53603 Sep 22 23:23:42.453 INFO [lossy] skipping 1236
53604 Sep 22 23:23:42.461 DEBG Flush :1212 extent_limit None deps:[JobId(1211), JobId(1210), JobId(1209)] res:true f:87 g:1
53605 Sep 22 23:23:42.461 INFO [lossy] skipping 1215
53606 Sep 22 23:23:42.461 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53607 Sep 22 23:23:42.461 INFO [lossy] skipping 1227
53608 Sep 22 23:23:42.461 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53609 Sep 22 23:23:42.461 INFO [lossy] skipping 1215
53610 Sep 22 23:23:42.461 WARN 1215 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53611 Sep 22 23:23:42.461 INFO [lossy] sleeping 1 second
53612 Sep 22 23:23:42.461 ERRO [2] job id 1210 saw error GenericError("test error")
53613 Sep 22 23:23:43.462 INFO [lossy] skipping 1213
53614 Sep 22 23:23:43.493 DEBG Write :1214 deps:[JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53615 Sep 22 23:23:43.494 WARN 1215 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53616 Sep 22 23:23:43.494 WARN 1216 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53617 Sep 22 23:23:43.494 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53618 Sep 22 23:23:43.494 INFO [lossy] skipping 1219
53619 Sep 22 23:23:43.494 INFO [lossy] skipping 1220
53620 Sep 22 23:23:43.494 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53621 Sep 22 23:23:43.495 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53622 Sep 22 23:23:43.495 INFO [lossy] skipping 1224
53623 Sep 22 23:23:43.495 INFO [lossy] skipping 1226
53624 Sep 22 23:23:43.495 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53625 Sep 22 23:23:43.495 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53626 Sep 22 23:23:43.495 INFO [lossy] skipping 1230
53627 Sep 22 23:23:43.495 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53628 Sep 22 23:23:43.495 INFO [lossy] skipping 1234
53629 Sep 22 23:23:43.495 INFO [lossy] skipping 1235
53630 Sep 22 23:23:43.495 INFO [lossy] skipping 1236
53631 Sep 22 23:23:43.526 DEBG Write :1213 deps:[JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53632 Sep 22 23:23:43.527 INFO [lossy] skipping 1219
53633 Sep 22 23:23:43.527 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53634 Sep 22 23:23:43.527 INFO [lossy] skipping 1226
53635 Sep 22 23:23:43.527 INFO [lossy] skipping 1230
53636 Sep 22 23:23:43.527 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
53637 Sep 22 23:23:43.527 INFO [lossy] skipping 1235
53638 Sep 22 23:23:43.527 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53639 Sep 22 23:23:43.527 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53640 Sep 22 23:23:43.527 INFO [lossy] skipping 1235
53641 Sep 22 23:23:43.535 DEBG Flush :1215 extent_limit None deps:[JobId(1214), JobId(1213), JobId(1212)] res:true f:88 g:1
53642 Sep 22 23:23:43.566 DEBG Write :1216 deps:[JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53643 Sep 22 23:23:43.568 WARN returning error on write!
53644 Sep 22 23:23:43.568 DEBG Write :1217 deps:[JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
53645 Sep 22 23:23:43.569 WARN 1218 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53646 Sep 22 23:23:43.569 INFO [lossy] skipping 1219
53647 Sep 22 23:23:43.569 INFO [lossy] skipping 1220
53648 Sep 22 23:23:43.569 INFO [lossy] skipping 1222
53649 Sep 22 23:23:43.569 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53650 Sep 22 23:23:43.569 INFO [lossy] skipping 1224
53651 Sep 22 23:23:43.569 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53652 Sep 22 23:23:43.569 INFO [lossy] skipping 1226
53653 Sep 22 23:23:43.569 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53654 Sep 22 23:23:43.569 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53655 Sep 22 23:23:43.569 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53656 Sep 22 23:23:43.569 INFO [lossy] skipping 1232
53657 Sep 22 23:23:43.569 INFO [lossy] skipping 1233
53658 Sep 22 23:23:43.569 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
53659 Sep 22 23:23:43.599 DEBG Write :1217 deps:[JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53660 Sep 22 23:23:43.600 WARN 1219 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53661 Sep 22 23:23:43.600 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53662 Sep 22 23:23:43.600 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53663 Sep 22 23:23:43.600 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53664 Sep 22 23:23:43.600 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53665 Sep 22 23:23:43.600 INFO [lossy] skipping 1218
53666 Sep 22 23:23:43.600 INFO [lossy] skipping 1222
53667 Sep 22 23:23:43.600 INFO [lossy] skipping 1234
53668 Sep 22 23:23:43.600 INFO [lossy] skipping 1218
53669 Sep 22 23:23:43.600 INFO [lossy] skipping 1234
53670 Sep 22 23:23:43.609 DEBG Flush :1218 extent_limit None deps:[JobId(1217), JobId(1216), JobId(1215)] res:true f:89 g:1
53671 Sep 22 23:23:43.609 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
53672 Sep 22 23:23:43.639 DEBG Write :1219 deps:[JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53673 Sep 22 23:23:43.641 INFO [lossy] skipping 1220
53674 Sep 22 23:23:43.641 INFO [lossy] skipping 1221
53675 Sep 22 23:23:43.641 WARN 1222 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53676 Sep 22 23:23:43.641 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53677 Sep 22 23:23:43.641 INFO [lossy] skipping 1224
53678 Sep 22 23:23:43.641 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53679 Sep 22 23:23:43.641 INFO [lossy] skipping 1226
53680 Sep 22 23:23:43.641 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53681 Sep 22 23:23:43.641 INFO [lossy] skipping 1229
53682 Sep 22 23:23:43.641 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53683 Sep 22 23:23:43.641 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53684 Sep 22 23:23:43.671 DEBG Write :1220 deps:[JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53685 Sep 22 23:23:43.681 DEBG Flush :1221 extent_limit None deps:[JobId(1220), JobId(1219), JobId(1218)] res:true f:90 g:1
53686 Sep 22 23:23:43.681 WARN 1224 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53687 Sep 22 23:23:43.681 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53688 Sep 22 23:23:43.681 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53689 Sep 22 23:23:43.681 INFO [lossy] sleeping 1 second
53690 Sep 22 23:23:43.682 ERRO [2] job id 1217 saw error GenericError("test error")
53691 Sep 22 23:23:44.683 INFO [lossy] skipping 1222
53692 Sep 22 23:23:44.713 DEBG Write :1223 deps:[JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53693 Sep 22 23:23:44.715 INFO [lossy] skipping 1224
53694 Sep 22 23:23:44.715 WARN 1225 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53695 Sep 22 23:23:44.715 WARN 1228 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53696 Sep 22 23:23:44.715 INFO [lossy] skipping 1231
53697 Sep 22 23:23:44.715 INFO [lossy] skipping 1232
53698 Sep 22 23:23:44.715 INFO [lossy] skipping 1233
53699 Sep 22 23:23:44.715 INFO [lossy] skipping 1234
53700 Sep 22 23:23:44.715 INFO [lossy] skipping 1235
53701 Sep 22 23:23:44.715 INFO [lossy] skipping 1222
53702 Sep 22 23:23:44.715 INFO [lossy] skipping 1224
53703 Sep 22 23:23:44.715 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
53704 Sep 22 23:23:44.715 INFO [lossy] skipping 1232
53705 Sep 22 23:23:44.715 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
53706 Sep 22 23:23:44.745 DEBG Write :1222 deps:[JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53707 Sep 22 23:23:44.755 DEBG Flush :1224 extent_limit None deps:[JobId(1223), JobId(1222), JobId(1221)] res:true f:91 g:1
53708 Sep 22 23:23:44.755 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53709 Sep 22 23:23:44.785 DEBG Write :1225 deps:[JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53710 Sep 22 23:23:44.816 DEBG Write :1226 deps:[JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53711 Sep 22 23:23:44.826 DEBG Flush :1227 extent_limit None deps:[JobId(1226), JobId(1225), JobId(1224)] res:true f:92 g:1
53712 Sep 22 23:23:44.826 INFO [lossy] skipping 1228
53713 Sep 22 23:23:44.856 DEBG Write :1229 deps:[JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53714 Sep 22 23:23:44.857 WARN 1230 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53715 Sep 22 23:23:44.857 WARN 1231 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53716 Sep 22 23:23:44.857 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53717 Sep 22 23:23:44.857 WARN 1234 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53718 Sep 22 23:23:44.888 DEBG Write :1228 deps:[JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53719 Sep 22 23:23:44.889 WARN returning error on flush!
53720 Sep 22 23:23:44.889 DEBG Flush :1230 extent_limit None deps:[JobId(1229), JobId(1228), JobId(1227)] res:false f:93 g:1
53721 Sep 22 23:23:44.889 INFO [lossy] skipping 1232
53722 Sep 22 23:23:44.889 INFO [lossy] skipping 1236
53723 Sep 22 23:23:44.898 DEBG Flush :1230 extent_limit None deps:[JobId(1229), JobId(1228), JobId(1227)] res:true f:93 g:1
53724 Sep 22 23:23:44.928 DEBG Write :1232 deps:[JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53725 Sep 22 23:23:44.930 INFO [lossy] skipping 1236
53726 Sep 22 23:23:44.930 INFO [lossy] skipping 1236
53727 Sep 22 23:23:44.930 INFO [lossy] skipping 1236
53728 Sep 22 23:23:44.930 INFO [lossy] skipping 1236
53729 Sep 22 23:23:44.961 DEBG Write :1231 deps:[JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
53730 Sep 22 23:23:44.970 DEBG Flush :1233 extent_limit None deps:[JobId(1232), JobId(1231), JobId(1230)] res:true f:94 g:1
53731 Sep 22 23:23:44.971 WARN returning error on write!
53732 Sep 22 23:23:44.971 DEBG Write :1234 deps:[JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)] res:false
53733 Sep 22 23:23:44.972 INFO [lossy] skipping 1235
53734 Sep 22 23:23:44.972 WARN 1236 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
53735 Sep 22 23:23:45.002 DEBG Write :1234 deps:[JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)] res:true
53736 Sep 22 23:23:45.010 DEBG Read :1235 deps:[JobId(1233)] res:true
53737 Sep 22 23:23:45.031 ERRO [2] job id 1230 saw error GenericError("test error")
53738 Sep 22 23:23:45.031 ERRO [2] job id 1234 saw error GenericError("test error")
53739 Sep 22 23:23:45.033 INFO [lossy] sleeping 1 second
53740 Sep 22 23:23:45.410 DEBG [2] Read AckReady 1235, : downstairs
53741 Sep 22 23:23:45.411 DEBG up_ds_listen was notified
53742 Sep 22 23:23:45.411 DEBG up_ds_listen process 1235
53743 Sep 22 23:23:45.411 DEBG [A] ack job 1235:236, : downstairs
53744 Sep 22 23:23:45.465 DEBG up_ds_listen checked 1 jobs, back to waiting
53745 Sep 22 23:23:45.466 DEBG IO Read 1237 has deps [JobId(1236)]
53746 Sep 22 23:23:45.533 DEBG IO Flush 1238 has deps [JobId(1237), JobId(1236)]
53747 Sep 22 23:23:46.040 DEBG Flush :1236 extent_limit None deps:[JobId(1235), JobId(1234), JobId(1233)] res:true f:95 g:1
53748 Sep 22 23:23:46.040 WARN returning error on read!
53749 Sep 22 23:23:46.040 DEBG Read :1237 deps:[JobId(1236)] res:false
53750 Sep 22 23:23:46.040 WARN returning error on read!
53751 Sep 22 23:23:46.040 DEBG Read :1237 deps:[JobId(1236)] res:false
53752 Sep 22 23:23:46.046 DEBG Read :1237 deps:[JobId(1236)] res:true
53753 Sep 22 23:23:46.068 ERRO [2] job id 1237 saw error GenericError("test error")
53754 Sep 22 23:23:46.068 ERRO [2] job id 1237 saw error GenericError("test error")
53755 Sep 22 23:23:46.069 DEBG Flush :1238 extent_limit None deps:[JobId(1237), JobId(1236)] res:true f:96 g:1
53756 Sep 22 23:23:46.069 INFO [lossy] sleeping 1 second
53757 Sep 22 23:23:46.447 DEBG [2] Read AckReady 1237, : downstairs
53758 Sep 22 23:23:46.448 DEBG up_ds_listen was notified
53759 Sep 22 23:23:46.448 DEBG up_ds_listen process 1237
53760 Sep 22 23:23:46.448 DEBG [A] ack job 1237:238, : downstairs
53761 Sep 22 23:23:46.502 DEBG up_ds_listen checked 1 jobs, back to waiting
53762 Sep 22 23:23:46.503 DEBG IO Read 1239 has deps [JobId(1238)]
53763 Sep 22 23:23:46.536 DEBG IO Flush 1240 has deps [JobId(1239), JobId(1238)]
53764 Sep 22 23:23:47.078 DEBG Read :1239 deps:[JobId(1238)] res:true
53765 Sep 22 23:23:47.100 INFO [lossy] skipping 1240
53766 Sep 22 23:23:47.100 INFO [lossy] skipping 1240
53767 Sep 22 23:23:47.100 DEBG Flush :1240 extent_limit None deps:[JobId(1239), JobId(1238)] res:true f:97 g:1
53768 Sep 22 23:23:47.101 INFO [lossy] sleeping 1 second
53769 Sep 22 23:23:47.478 DEBG [2] Read AckReady 1239, : downstairs
53770 Sep 22 23:23:47.479 DEBG up_ds_listen was notified
53771 Sep 22 23:23:47.479 DEBG up_ds_listen process 1239
53772 Sep 22 23:23:47.479 DEBG [A] ack job 1239:240, : downstairs
53773 Sep 22 23:23:47.532 DEBG up_ds_listen checked 1 jobs, back to waiting
53774 Sep 22 23:23:47.534 DEBG IO Read 1241 has deps [JobId(1240)]
53775 Sep 22 23:23:47.539 DEBG IO Flush 1242 has deps [JobId(1241), JobId(1240)]
53776 Sep 22 23:23:48.108 DEBG Read :1241 deps:[JobId(1240)] res:true
53777 Sep 22 23:23:48.131 INFO [lossy] skipping 1242
53778 Sep 22 23:23:48.131 DEBG Flush :1242 extent_limit None deps:[JobId(1241), JobId(1240)] res:true f:98 g:1
53779 Sep 22 23:23:48.131 INFO [lossy] sleeping 1 second
53780 Sep 22 23:23:48.509 DEBG [2] Read AckReady 1241, : downstairs
53781 Sep 22 23:23:48.510 DEBG up_ds_listen was notified
53782 Sep 22 23:23:48.510 DEBG up_ds_listen process 1241
53783 Sep 22 23:23:48.510 DEBG [A] ack job 1241:242, : downstairs
53784 Sep 22 23:23:48.562 DEBG up_ds_listen checked 1 jobs, back to waiting
53785 Sep 22 23:23:48.564 DEBG IO Read 1243 has deps [JobId(1242)]
53786 Sep 22 23:23:49.064 DEBG IO Flush 1244 has deps [JobId(1243), JobId(1242)]
53787 Sep 22 23:23:49.140 DEBG Read :1243 deps:[JobId(1242)] res:true
53788 Sep 22 23:23:49.163 INFO [lossy] skipping 1244
53789 Sep 22 23:23:49.163 DEBG Flush :1244 extent_limit None deps:[JobId(1243), JobId(1242)] res:true f:99 g:1
53790 Sep 22 23:23:49.163 INFO [lossy] sleeping 1 second
53791 Sep 22 23:23:49.540 DEBG [2] Read AckReady 1243, : downstairs
53792 Sep 22 23:23:49.541 DEBG up_ds_listen was notified
53793 Sep 22 23:23:49.541 DEBG up_ds_listen process 1243
53794 Sep 22 23:23:49.541 DEBG [A] ack job 1243:244, : downstairs
53795 Sep 22 23:23:49.594 DEBG up_ds_listen checked 1 jobs, back to waiting
53796 Sep 22 23:23:49.596 DEBG IO Read 1245 has deps [JobId(1244)]
53797 Sep 22 23:23:50.096 DEBG IO Flush 1246 has deps [JobId(1245), JobId(1244)]
53798 Sep 22 23:23:50.164 INFO [lossy] skipping 1245
53799 Sep 22 23:23:50.170 DEBG Read :1245 deps:[JobId(1244)] res:true
53800 Sep 22 23:23:50.193 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:true f:100 g:1
53801 Sep 22 23:23:50.193 INFO [lossy] sleeping 1 second
53802 Sep 22 23:23:50.570 DEBG [2] Read AckReady 1245, : downstairs
53803 Sep 22 23:23:50.571 DEBG up_ds_listen was notified
53804 Sep 22 23:23:50.571 DEBG up_ds_listen process 1245
53805 Sep 22 23:23:50.571 DEBG [A] ack job 1245:246, : downstairs
53806 Sep 22 23:23:50.624 DEBG up_ds_listen checked 1 jobs, back to waiting
53807 Sep 22 23:23:50.625 DEBG IO Read 1247 has deps [JobId(1246)]
53808 Sep 22 23:23:51.126 DEBG IO Flush 1248 has deps [JobId(1247), JobId(1246)]
53809 Sep 22 23:23:51.193 INFO [lossy] skipping 1247
53810 Sep 22 23:23:51.200 DEBG Read :1247 deps:[JobId(1246)] res:true
53811 Sep 22 23:23:51.223 INFO [lossy] sleeping 1 second
53812 Sep 22 23:23:51.600 DEBG [2] Read AckReady 1247, : downstairs
53813 Sep 22 23:23:51.601 DEBG up_ds_listen was notified
53814 Sep 22 23:23:51.601 DEBG up_ds_listen process 1247
53815 Sep 22 23:23:51.601 DEBG [A] ack job 1247:248, : downstairs
53816 Sep 22 23:23:51.654 DEBG up_ds_listen checked 1 jobs, back to waiting
53817 Sep 22 23:23:51.655 DEBG IO Read 1249 has deps [JobId(1248)]
53818 Sep 22 23:23:52.156 DEBG IO Flush 1250 has deps [JobId(1249), JobId(1248)]
53819 Sep 22 23:23:52.223 INFO [lossy] skipping 1248
53820 Sep 22 23:23:52.223 INFO [lossy] skipping 1250
53821 Sep 22 23:23:52.223 INFO [lossy] skipping 1248
53822 Sep 22 23:23:52.223 INFO [lossy] skipping 1248
53823 Sep 22 23:23:52.223 DEBG Flush :1248 extent_limit None deps:[JobId(1247), JobId(1246)] res:true f:101 g:1
53824 Sep 22 23:23:52.223 INFO [lossy] sleeping 1 second
53825 Sep 22 23:23:53.225 INFO [lossy] skipping 1249
53826 Sep 22 23:23:53.225 INFO [lossy] skipping 1250
53827 Sep 22 23:23:53.225 INFO [lossy] skipping 1249
53828 Sep 22 23:23:53.225 WARN 1250 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
53829 Sep 22 23:23:53.225 WARN returning error on read!
53830 Sep 22 23:23:53.225 DEBG Read :1249 deps:[JobId(1248)] res:false
53831 Sep 22 23:23:53.225 WARN returning error on read!
53832 Sep 22 23:23:53.225 DEBG Read :1249 deps:[JobId(1248)] res:false
53833 Sep 22 23:23:53.225 INFO [lossy] skipping 1249
53834 Sep 22 23:23:53.225 INFO [lossy] skipping 1249
53835 Sep 22 23:23:53.231 DEBG Read :1249 deps:[JobId(1248)] res:true
53836 Sep 22 23:23:53.253 ERRO [2] job id 1249 saw error GenericError("test error")
53837 Sep 22 23:23:53.253 ERRO [2] job id 1249 saw error GenericError("test error")
53838 Sep 22 23:23:53.255 INFO [lossy] skipping 1250
53839 Sep 22 23:23:53.255 WARN returning error on flush!
53840 Sep 22 23:23:53.255 DEBG Flush :1250 extent_limit None deps:[JobId(1249), JobId(1248)] res:false f:102 g:1
53841 Sep 22 23:23:53.255 INFO [lossy] skipping 1250
53842 Sep 22 23:23:53.255 WARN returning error on flush!
53843 Sep 22 23:23:53.255 DEBG Flush :1250 extent_limit None deps:[JobId(1249), JobId(1248)] res:false f:102 g:1
53844 Sep 22 23:23:53.255 DEBG Flush :1250 extent_limit None deps:[JobId(1249), JobId(1248)] res:true f:102 g:1
53845 Sep 22 23:23:53.255 INFO [lossy] sleeping 1 second
53846 Sep 22 23:23:53.631 DEBG [2] Read AckReady 1249, : downstairs
53847 Sep 22 23:23:53.631 ERRO [2] job id 1250 saw error GenericError("test error")
53848 Sep 22 23:23:53.631 ERRO [2] job id 1250 saw error GenericError("test error")
53849 Sep 22 23:23:53.631 DEBG up_ds_listen was notified
53850 Sep 22 23:23:53.632 DEBG up_ds_listen process 1249
53851 Sep 22 23:23:53.632 DEBG [A] ack job 1249:250, : downstairs
53852 Sep 22 23:23:53.684 DEBG up_ds_listen checked 1 jobs, back to waiting
53853 Sep 22 23:23:53.685 DEBG IO Read 1251 has deps [JobId(1250)]
53854 Sep 22 23:23:54.186 DEBG IO Flush 1252 has deps [JobId(1251), JobId(1250)]
53855 Sep 22 23:23:54.255 WARN returning error on read!
53856 Sep 22 23:23:54.255 DEBG Read :1251 deps:[JobId(1250)] res:false
53857 Sep 22 23:23:54.256 INFO [lossy] skipping 1251
53858 Sep 22 23:23:54.256 WARN returning error on read!
53859 Sep 22 23:23:54.256 DEBG Read :1251 deps:[JobId(1250)] res:false
53860 Sep 22 23:23:54.256 INFO [lossy] skipping 1251
53861 Sep 22 23:23:54.262 DEBG Read :1251 deps:[JobId(1250)] res:true
53862 Sep 22 23:23:54.284 ERRO [2] job id 1251 saw error GenericError("test error")
53863 Sep 22 23:23:54.284 ERRO [2] job id 1251 saw error GenericError("test error")
53864 Sep 22 23:23:54.285 DEBG Flush :1252 extent_limit None deps:[JobId(1251), JobId(1250)] res:true f:103 g:1
53865 Sep 22 23:23:54.285 INFO [lossy] sleeping 1 second
53866 Sep 22 23:23:54.661 DEBG [2] Read AckReady 1251, : downstairs
53867 Sep 22 23:23:54.662 DEBG up_ds_listen was notified
53868 Sep 22 23:23:54.662 DEBG up_ds_listen process 1251
53869 Sep 22 23:23:54.662 DEBG [A] ack job 1251:252, : downstairs
53870 Sep 22 23:23:54.715 DEBG up_ds_listen checked 1 jobs, back to waiting
53871 Sep 22 23:23:54.716 DEBG IO Read 1253 has deps [JobId(1252)]
53872 Sep 22 23:23:55.216 DEBG IO Flush 1254 has deps [JobId(1253), JobId(1252)]
53873 Sep 22 23:23:55.293 DEBG Read :1253 deps:[JobId(1252)] res:true
53874 Sep 22 23:23:55.316 DEBG Flush :1254 extent_limit None deps:[JobId(1253), JobId(1252)] res:true f:104 g:1
53875 Sep 22 23:23:55.316 INFO [lossy] sleeping 1 second
53876 Sep 22 23:23:55.694 DEBG [2] Read AckReady 1253, : downstairs
53877 Sep 22 23:23:55.695 DEBG up_ds_listen was notified
53878 Sep 22 23:23:55.695 DEBG up_ds_listen process 1253
53879 Sep 22 23:23:55.695 DEBG [A] ack job 1253:254, : downstairs
53880 Sep 22 23:23:55.748 DEBG up_ds_listen checked 1 jobs, back to waiting
53881 Sep 22 23:23:55.749 DEBG IO Read 1255 has deps [JobId(1254)]
53882 Sep 22 23:23:56.250 DEBG IO Flush 1256 has deps [JobId(1255), JobId(1254)]
53883 Sep 22 23:23:56.318 WARN returning error on read!
53884 Sep 22 23:23:56.318 DEBG Read :1255 deps:[JobId(1254)] res:false
53885 Sep 22 23:23:56.324 DEBG Read :1255 deps:[JobId(1254)] res:true
53886 Sep 22 23:23:56.345 ERRO [2] job id 1255 saw error GenericError("test error")
53887 Sep 22 23:23:56.347 INFO [lossy] sleeping 1 second
53888 Sep 22 23:23:56.724 DEBG [2] Read AckReady 1255, : downstairs
53889 Sep 22 23:23:56.725 DEBG up_ds_listen was notified
53890 Sep 22 23:23:56.725 DEBG up_ds_listen process 1255
53891 Sep 22 23:23:56.725 DEBG [A] ack job 1255:256, : downstairs
53892 Sep 22 23:23:56.778 DEBG up_ds_listen checked 1 jobs, back to waiting
53893 Sep 22 23:23:56.779 DEBG IO Read 1257 has deps [JobId(1256)]
53894 Sep 22 23:23:57.279 DEBG IO Flush 1258 has deps [JobId(1257), JobId(1256)]
53895 Sep 22 23:23:57.348 DEBG Flush :1256 extent_limit None deps:[JobId(1255), JobId(1254)] res:true f:105 g:1
53896 Sep 22 23:23:57.355 DEBG Read :1257 deps:[JobId(1256)] res:true
53897 Sep 22 23:23:57.377 INFO [lossy] skipping 1258
53898 Sep 22 23:23:57.378 DEBG Flush :1258 extent_limit None deps:[JobId(1257), JobId(1256)] res:true f:106 g:1
53899 Sep 22 23:23:57.378 INFO [lossy] sleeping 1 second
53900 Sep 22 23:23:57.755 DEBG [2] Read AckReady 1257, : downstairs
53901 Sep 22 23:23:57.756 DEBG up_ds_listen was notified
53902 Sep 22 23:23:57.756 DEBG up_ds_listen process 1257
53903 Sep 22 23:23:57.756 DEBG [A] ack job 1257:258, : downstairs
53904 Sep 22 23:23:57.808 DEBG up_ds_listen checked 1 jobs, back to waiting
53905 Sep 22 23:23:57.810 DEBG IO Read 1259 has deps [JobId(1258)]
53906 Sep 22 23:23:58.311 DEBG IO Flush 1260 has deps [JobId(1259), JobId(1258)]
53907 Sep 22 23:23:58.379 INFO [lossy] skipping 1259
53908 Sep 22 23:23:58.379 INFO [lossy] skipping 1260
53909 Sep 22 23:23:58.379 INFO [lossy] skipping 1259
53910 Sep 22 23:23:58.385 DEBG Read :1259 deps:[JobId(1258)] res:true
53911 Sep 22 23:23:58.408 DEBG Flush :1260 extent_limit None deps:[JobId(1259), JobId(1258)] res:true f:107 g:1
53912 Sep 22 23:23:58.408 INFO [lossy] sleeping 1 second
53913 Sep 22 23:23:58.786 DEBG [2] Read AckReady 1259, : downstairs
53914 Sep 22 23:23:58.787 DEBG up_ds_listen was notified
53915 Sep 22 23:23:58.787 DEBG up_ds_listen process 1259
53916 Sep 22 23:23:58.787 DEBG [A] ack job 1259:260, : downstairs
53917 Sep 22 23:23:58.840 DEBG up_ds_listen checked 1 jobs, back to waiting
53918 Sep 22 23:23:58.841 DEBG IO Read 1261 has deps [JobId(1260)]
53919 Sep 22 23:23:59.343 DEBG IO Flush 1262 has deps [JobId(1261), JobId(1260)]
53920 Sep 22 23:23:59.410 WARN returning error on read!
53921 Sep 22 23:23:59.410 DEBG Read :1261 deps:[JobId(1260)] res:false
53922 Sep 22 23:23:59.416 DEBG Read :1261 deps:[JobId(1260)] res:true
53923 Sep 22 23:23:59.437 ERRO [2] job id 1261 saw error GenericError("test error")
53924 Sep 22 23:23:59.439 DEBG Flush :1262 extent_limit None deps:[JobId(1261), JobId(1260)] res:true f:108 g:1
53925 Sep 22 23:23:59.439 INFO [lossy] sleeping 1 second
53926 Sep 22 23:23:59.816 DEBG [2] Read AckReady 1261, : downstairs
53927 Sep 22 23:23:59.817 DEBG up_ds_listen was notified
53928 Sep 22 23:23:59.817 DEBG up_ds_listen process 1261
53929 Sep 22 23:23:59.817 DEBG [A] ack job 1261:262, : downstairs
53930 Sep 22 23:23:59.869 DEBG up_ds_listen checked 1 jobs, back to waiting
53931 Sep 22 23:23:59.871 DEBG IO Read 1263 has deps [JobId(1262)]
53932 Sep 22 23:24:00.372 DEBG IO Flush 1264 has deps [JobId(1263), JobId(1262)]
53933 Sep 22 23:24:00.446 DEBG Read :1263 deps:[JobId(1262)] res:true
53934 Sep 22 23:24:00.469 WARN returning error on flush!
53935 Sep 22 23:24:00.469 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:false f:109 g:1
53936 Sep 22 23:24:00.469 WARN returning error on flush!
53937 Sep 22 23:24:00.469 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:false f:109 g:1
53938 Sep 22 23:24:00.469 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:true f:109 g:1
53939 Sep 22 23:24:00.469 INFO [lossy] sleeping 1 second
53940 Sep 22 23:24:00.845 DEBG [2] Read AckReady 1263, : downstairs
53941 Sep 22 23:24:00.847 ERRO [2] job id 1264 saw error GenericError("test error")
53942 Sep 22 23:24:00.847 ERRO [2] job id 1264 saw error GenericError("test error")
53943 Sep 22 23:24:00.847 DEBG up_ds_listen was notified
53944 Sep 22 23:24:00.847 DEBG up_ds_listen process 1263
53945 Sep 22 23:24:00.847 DEBG [A] ack job 1263:264, : downstairs
53946 Sep 22 23:24:00.899 DEBG up_ds_listen checked 1 jobs, back to waiting
53947 Sep 22 23:24:00.901 DEBG IO Read 1265 has deps [JobId(1264)]
53948 Sep 22 23:24:01.402 DEBG IO Flush 1266 has deps [JobId(1265), JobId(1264)]
53949 Sep 22 23:24:01.470 WARN returning error on read!
53950 Sep 22 23:24:01.470 DEBG Read :1265 deps:[JobId(1264)] res:false
53951 Sep 22 23:24:01.471 INFO [lossy] skipping 1266
53952 Sep 22 23:24:01.471 INFO [lossy] skipping 1265
53953 Sep 22 23:24:01.477 DEBG Read :1265 deps:[JobId(1264)] res:true
53954 Sep 22 23:24:01.499 ERRO [2] job id 1265 saw error GenericError("test error")
53955 Sep 22 23:24:01.500 INFO [lossy] sleeping 1 second
53956 Sep 22 23:24:01.877 DEBG [2] Read AckReady 1265, : downstairs
53957 Sep 22 23:24:01.878 DEBG up_ds_listen was notified
53958 Sep 22 23:24:01.878 DEBG up_ds_listen process 1265
53959 Sep 22 23:24:01.878 DEBG [A] ack job 1265:266, : downstairs
53960 Sep 22 23:24:01.930 DEBG up_ds_listen checked 1 jobs, back to waiting
53961 Sep 22 23:24:01.932 DEBG IO Read 1267 has deps [JobId(1266)]
53962 Sep 22 23:24:02.432 DEBG IO Flush 1268 has deps [JobId(1267), JobId(1266)]
53963 Sep 22 23:24:02.502 INFO [lossy] skipping 1266
53964 Sep 22 23:24:02.502 INFO [lossy] skipping 1267
53965 Sep 22 23:24:02.502 DEBG Flush :1266 extent_limit None deps:[JobId(1265), JobId(1264)] res:true f:110 g:1
53966 Sep 22 23:24:02.508 DEBG Read :1267 deps:[JobId(1266)] res:true
53967 Sep 22 23:24:02.531 WARN returning error on flush!
53968 Sep 22 23:24:02.531 DEBG Flush :1268 extent_limit None deps:[JobId(1267), JobId(1266)] res:false f:111 g:1
53969 Sep 22 23:24:02.531 INFO [lossy] skipping 1268
53970 Sep 22 23:24:02.531 INFO [lossy] skipping 1268
53971 Sep 22 23:24:02.531 DEBG Flush :1268 extent_limit None deps:[JobId(1267), JobId(1266)] res:true f:111 g:1
53972 Sep 22 23:24:02.531 INFO [lossy] sleeping 1 second
53973 Sep 22 23:24:02.908 DEBG [2] Read AckReady 1267, : downstairs
53974 Sep 22 23:24:02.909 ERRO [2] job id 1268 saw error GenericError("test error")
53975 Sep 22 23:24:02.909 DEBG up_ds_listen was notified
53976 Sep 22 23:24:02.909 DEBG up_ds_listen process 1267
53977 Sep 22 23:24:02.909 DEBG [A] ack job 1267:268, : downstairs
53978 Sep 22 23:24:02.962 DEBG up_ds_listen checked 1 jobs, back to waiting
53979 Sep 22 23:24:02.963 DEBG IO Read 1269 has deps [JobId(1268)]
53980 Sep 22 23:24:03.465 DEBG IO Flush 1270 has deps [JobId(1269), JobId(1268)]
53981 Sep 22 23:24:03.538 DEBG Read :1269 deps:[JobId(1268)] res:true
53982 Sep 22 23:24:03.561 INFO [lossy] skipping 1270
53983 Sep 22 23:24:03.561 WARN returning error on flush!
53984 Sep 22 23:24:03.561 DEBG Flush :1270 extent_limit None deps:[JobId(1269), JobId(1268)] res:false f:112 g:1
53985 Sep 22 23:24:03.561 INFO [lossy] skipping 1270
53986 Sep 22 23:24:03.561 DEBG Flush :1270 extent_limit None deps:[JobId(1269), JobId(1268)] res:true f:112 g:1
53987 Sep 22 23:24:03.561 INFO [lossy] sleeping 1 second
53988 Sep 22 23:24:03.938 DEBG [2] Read AckReady 1269, : downstairs
53989 Sep 22 23:24:03.939 ERRO [2] job id 1270 saw error GenericError("test error")
53990 Sep 22 23:24:03.939 DEBG up_ds_listen was notified
53991 Sep 22 23:24:03.939 DEBG up_ds_listen process 1269
53992 Sep 22 23:24:03.939 DEBG [A] ack job 1269:270, : downstairs
53993 Sep 22 23:24:03.991 DEBG up_ds_listen checked 1 jobs, back to waiting
53994 Sep 22 23:24:03.993 DEBG IO Read 1271 has deps [JobId(1270)]
53995 Sep 22 23:24:04.493 DEBG IO Flush 1272 has deps [JobId(1271), JobId(1270)]
53996 Sep 22 23:24:04.563 WARN returning error on read!
53997 Sep 22 23:24:04.563 DEBG Read :1271 deps:[JobId(1270)] res:false
53998 Sep 22 23:24:04.563 INFO [lossy] skipping 1271
53999 Sep 22 23:24:04.563 WARN returning error on read!
54000 Sep 22 23:24:04.563 DEBG Read :1271 deps:[JobId(1270)] res:false
54001 Sep 22 23:24:04.563 INFO [lossy] skipping 1271
54002 Sep 22 23:24:04.563 INFO [lossy] skipping 1271
54003 Sep 22 23:24:04.570 DEBG Read :1271 deps:[JobId(1270)] res:true
54004 Sep 22 23:24:04.591 ERRO [2] job id 1271 saw error GenericError("test error")
54005 Sep 22 23:24:04.591 ERRO [2] job id 1271 saw error GenericError("test error")
54006 Sep 22 23:24:04.593 INFO [lossy] skipping 1272
54007 Sep 22 23:24:04.593 DEBG Flush :1272 extent_limit None deps:[JobId(1271), JobId(1270)] res:true f:113 g:1
54008 Sep 22 23:24:04.593 INFO [lossy] sleeping 1 second
54009 Sep 22 23:24:04.970 DEBG [2] Read AckReady 1271, : downstairs
54010 Sep 22 23:24:04.971 DEBG up_ds_listen was notified
54011 Sep 22 23:24:04.971 DEBG up_ds_listen process 1271
54012 Sep 22 23:24:04.972 DEBG [A] ack job 1271:272, : downstairs
54013 Sep 22 23:24:05.024 DEBG up_ds_listen checked 1 jobs, back to waiting
54014 Sep 22 23:24:05.025 DEBG IO Read 1273 has deps [JobId(1272)]
54015 Sep 22 23:24:05.526 DEBG IO Flush 1274 has deps [JobId(1273), JobId(1272)]
54016 Sep 22 23:24:05.601 DEBG Read :1273 deps:[JobId(1272)] res:true
54017 Sep 22 23:24:05.624 DEBG Flush :1274 extent_limit None deps:[JobId(1273), JobId(1272)] res:true f:114 g:1
54018 Sep 22 23:24:05.624 INFO [lossy] sleeping 1 second
54019 Sep 22 23:24:06.002 DEBG [2] Read AckReady 1273, : downstairs
54020 Sep 22 23:24:06.003 DEBG up_ds_listen was notified
54021 Sep 22 23:24:06.003 DEBG up_ds_listen process 1273
54022 Sep 22 23:24:06.003 DEBG [A] ack job 1273:274, : downstairs
54023 Sep 22 23:24:06.056 DEBG up_ds_listen checked 1 jobs, back to waiting
54024 Sep 22 23:24:06.057 DEBG IO Read 1275 has deps [JobId(1274)]
54025 Sep 22 23:24:06.559 DEBG IO Flush 1276 has deps [JobId(1275), JobId(1274)]
54026 Sep 22 23:24:06.625 INFO [lossy] skipping 1275
54027 Sep 22 23:24:06.626 WARN returning error on read!
54028 Sep 22 23:24:06.626 DEBG Read :1275 deps:[JobId(1274)] res:false
54029 Sep 22 23:24:06.626 WARN returning error on read!
54030 Sep 22 23:24:06.626 DEBG Read :1275 deps:[JobId(1274)] res:false
54031 Sep 22 23:24:06.632 DEBG Read :1275 deps:[JobId(1274)] res:true
54032 Sep 22 23:24:06.654 ERRO [2] job id 1275 saw error GenericError("test error")
54033 Sep 22 23:24:06.654 ERRO [2] job id 1275 saw error GenericError("test error")
54034 Sep 22 23:24:06.655 DEBG Flush :1276 extent_limit None deps:[JobId(1275), JobId(1274)] res:true f:115 g:1
54035 Sep 22 23:24:06.655 INFO [lossy] sleeping 1 second
54036 Sep 22 23:24:07.034 DEBG [2] Read AckReady 1275, : downstairs
54037 Sep 22 23:24:07.036 DEBG up_ds_listen was notified
54038 Sep 22 23:24:07.036 DEBG up_ds_listen process 1275
54039 Sep 22 23:24:07.036 DEBG [A] ack job 1275:276, : downstairs
54040 Sep 22 23:24:07.089 DEBG up_ds_listen checked 1 jobs, back to waiting
54041 Sep 22 23:24:07.090 DEBG IO Read 1277 has deps [JobId(1276)]
54042 Sep 22 23:24:07.590 DEBG IO Flush 1278 has deps [JobId(1277), JobId(1276)]
54043 Sep 22 23:24:07.662 DEBG Read :1277 deps:[JobId(1276)] res:true
54044 Sep 22 23:24:07.685 DEBG Flush :1278 extent_limit None deps:[JobId(1277), JobId(1276)] res:true f:116 g:1
54045 Sep 22 23:24:07.685 INFO [lossy] sleeping 1 second
54046 Sep 22 23:24:08.063 DEBG [2] Read AckReady 1277, : downstairs
54047 Sep 22 23:24:08.064 DEBG up_ds_listen was notified
54048 Sep 22 23:24:08.064 DEBG up_ds_listen process 1277
54049 Sep 22 23:24:08.064 DEBG [A] ack job 1277:278, : downstairs
54050 Sep 22 23:24:08.117 DEBG up_ds_listen checked 1 jobs, back to waiting
54051 Sep 22 23:24:08.119 DEBG IO Read 1279 has deps [JobId(1278)]
54052 Sep 22 23:24:08.619 DEBG IO Flush 1280 has deps [JobId(1279), JobId(1278)]
54053 Sep 22 23:24:08.687 INFO [lossy] skipping 1279
54054 Sep 22 23:24:08.693 DEBG Read :1279 deps:[JobId(1278)] res:true
54055 Sep 22 23:24:08.716 INFO [lossy] sleeping 1 second
54056 Sep 22 23:24:09.092 DEBG [2] Read AckReady 1279, : downstairs
54057 Sep 22 23:24:09.093 DEBG up_ds_listen was notified
54058 Sep 22 23:24:09.093 DEBG up_ds_listen process 1279
54059 Sep 22 23:24:09.093 DEBG [A] ack job 1279:280, : downstairs
54060 Sep 22 23:24:09.146 DEBG up_ds_listen checked 1 jobs, back to waiting
54061 Sep 22 23:24:09.147 DEBG IO Read 1281 has deps [JobId(1280)]
54062 Sep 22 23:24:09.648 DEBG IO Flush 1282 has deps [JobId(1281), JobId(1280)]
54063 Sep 22 23:24:09.718 INFO [lossy] skipping 1280
54064 Sep 22 23:24:09.718 DEBG Flush :1280 extent_limit None deps:[JobId(1279), JobId(1278)] res:true f:117 g:1
54065 Sep 22 23:24:09.718 WARN returning error on read!
54066 Sep 22 23:24:09.718 DEBG Read :1281 deps:[JobId(1280)] res:false
54067 Sep 22 23:24:09.718 WARN 1282 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54068 Sep 22 23:24:09.724 DEBG Read :1281 deps:[JobId(1280)] res:true
54069 Sep 22 23:24:09.746 ERRO [2] job id 1281 saw error GenericError("test error")
54070 Sep 22 23:24:09.747 INFO [lossy] sleeping 1 second
54071 Sep 22 23:24:10.124 DEBG [2] Read AckReady 1281, : downstairs
54072 Sep 22 23:24:10.125 DEBG up_ds_listen was notified
54073 Sep 22 23:24:10.126 DEBG up_ds_listen process 1281
54074 Sep 22 23:24:10.126 DEBG [A] ack job 1281:282, : downstairs
54075 Sep 22 23:24:10.178 DEBG up_ds_listen checked 1 jobs, back to waiting
54076 Sep 22 23:24:10.180 DEBG IO Read 1283 has deps [JobId(1282)]
54077 Sep 22 23:24:10.680 DEBG IO Flush 1284 has deps [JobId(1283), JobId(1282)]
54078 Sep 22 23:24:10.749 DEBG Flush :1282 extent_limit None deps:[JobId(1281), JobId(1280)] res:true f:118 g:1
54079 Sep 22 23:24:10.755 DEBG Read :1283 deps:[JobId(1282)] res:true
54080 Sep 22 23:24:10.778 DEBG Flush :1284 extent_limit None deps:[JobId(1283), JobId(1282)] res:true f:119 g:1
54081 Sep 22 23:24:10.778 INFO [lossy] sleeping 1 second
54082 Sep 22 23:24:11.155 DEBG [2] Read AckReady 1283, : downstairs
54083 Sep 22 23:24:11.156 DEBG up_ds_listen was notified
54084 Sep 22 23:24:11.156 DEBG up_ds_listen process 1283
54085 Sep 22 23:24:11.156 DEBG [A] ack job 1283:284, : downstairs
54086 Sep 22 23:24:11.209 DEBG up_ds_listen checked 1 jobs, back to waiting
54087 Sep 22 23:24:11.211 DEBG IO Read 1285 has deps [JobId(1284)]
54088 Sep 22 23:24:11.711 DEBG IO Flush 1286 has deps [JobId(1285), JobId(1284)]
54089 Sep 22 23:24:11.779 WARN returning error on read!
54090 Sep 22 23:24:11.779 DEBG Read :1285 deps:[JobId(1284)] res:false
54091 Sep 22 23:24:11.779 INFO [lossy] skipping 1286
54092 Sep 22 23:24:11.785 DEBG Read :1285 deps:[JobId(1284)] res:true
54093 Sep 22 23:24:11.807 ERRO [2] job id 1285 saw error GenericError("test error")
54094 Sep 22 23:24:11.808 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:true f:120 g:1
54095 Sep 22 23:24:12.186 DEBG [2] Read AckReady 1285, : downstairs
54096 Sep 22 23:24:12.187 DEBG up_ds_listen was notified
54097 Sep 22 23:24:12.187 DEBG up_ds_listen process 1285
54098 Sep 22 23:24:12.187 DEBG [A] ack job 1285:286, : downstairs
54099 Sep 22 23:24:12.240 DEBG up_ds_listen checked 1 jobs, back to waiting
54100 Sep 22 23:24:12.241 DEBG IO Read 1287 has deps [JobId(1286)]
54101 Sep 22 23:24:12.245 WARN returning error on read!
54102 Sep 22 23:24:12.245 DEBG Read :1287 deps:[JobId(1286)] res:false
54103 Sep 22 23:24:12.251 DEBG Read :1287 deps:[JobId(1286)] res:true
54104 Sep 22 23:24:12.273 ERRO [2] job id 1287 saw error GenericError("test error")
54105 Sep 22 23:24:12.650 DEBG [2] Read AckReady 1287, : downstairs
54106 Sep 22 23:24:12.651 DEBG up_ds_listen was notified
54107 Sep 22 23:24:12.651 DEBG up_ds_listen process 1287
54108 Sep 22 23:24:12.651 DEBG [A] ack job 1287:288, : downstairs
54109 Sep 22 23:24:12.704 DEBG up_ds_listen checked 1 jobs, back to waiting
54110 Sep 22 23:24:12.705 DEBG IO Read 1288 has deps [JobId(1286)]
54111 Sep 22 23:24:12.709 INFO [lossy] sleeping 1 second
54112 Sep 22 23:24:12.742 DEBG IO Flush 1289 has deps [JobId(1288), JobId(1287), JobId(1286)]
54113 Sep 22 23:24:13.710 INFO [lossy] skipping 1288
54114 Sep 22 23:24:13.711 INFO [lossy] skipping 1288
54115 Sep 22 23:24:13.717 DEBG Read :1288 deps:[JobId(1286)] res:true
54116 Sep 22 23:24:13.740 INFO [lossy] sleeping 1 second
54117 Sep 22 23:24:14.116 DEBG [2] Read AckReady 1288, : downstairs
54118 Sep 22 23:24:14.117 DEBG up_ds_listen was notified
54119 Sep 22 23:24:14.117 DEBG up_ds_listen process 1288
54120 Sep 22 23:24:14.117 DEBG [A] ack job 1288:289, : downstairs
54121 Sep 22 23:24:14.170 DEBG up_ds_listen checked 1 jobs, back to waiting
54122 Sep 22 23:24:14.171 DEBG IO Read 1290 has deps [JobId(1289)]
54123 Sep 22 23:24:14.673 DEBG IO Flush 1291 has deps [JobId(1290), JobId(1289)]
54124 Sep 22 23:24:14.741 DEBG Flush :1289 extent_limit None deps:[JobId(1288), JobId(1287), JobId(1286)] res:true f:121 g:1
54125 Sep 22 23:24:14.748 DEBG Read :1290 deps:[JobId(1289)] res:true
54126 Sep 22 23:24:14.771 WARN returning error on flush!
54127 Sep 22 23:24:14.771 DEBG Flush :1291 extent_limit None deps:[JobId(1290), JobId(1289)] res:false f:122 g:1
54128 Sep 22 23:24:14.771 INFO [lossy] skipping 1291
54129 Sep 22 23:24:14.771 WARN returning error on flush!
54130 Sep 22 23:24:14.771 DEBG Flush :1291 extent_limit None deps:[JobId(1290), JobId(1289)] res:false f:122 g:1
54131 Sep 22 23:24:14.771 DEBG Flush :1291 extent_limit None deps:[JobId(1290), JobId(1289)] res:true f:122 g:1
54132 Sep 22 23:24:15.148 DEBG [2] Read AckReady 1290, : downstairs
54133 Sep 22 23:24:15.149 ERRO [2] job id 1291 saw error GenericError("test error")
54134 Sep 22 23:24:15.149 ERRO [2] job id 1291 saw error GenericError("test error")
54135 Sep 22 23:24:15.149 DEBG up_ds_listen was notified
54136 Sep 22 23:24:15.149 DEBG up_ds_listen process 1290
54137 Sep 22 23:24:15.149 DEBG [A] ack job 1290:291, : downstairs
54138 Sep 22 23:24:15.202 DEBG up_ds_listen checked 1 jobs, back to waiting
54139 Sep 22 23:24:15.203 DEBG IO Read 1292 has deps [JobId(1291)]
54140 Sep 22 23:24:15.213 DEBG Read :1292 deps:[JobId(1291)] res:true
54141 Sep 22 23:24:15.612 DEBG [2] Read AckReady 1292, : downstairs
54142 Sep 22 23:24:15.613 DEBG up_ds_listen was notified
54143 Sep 22 23:24:15.613 DEBG up_ds_listen process 1292
54144 Sep 22 23:24:15.613 DEBG [A] ack job 1292:293, : downstairs
54145 Sep 22 23:24:15.665 DEBG up_ds_listen checked 1 jobs, back to waiting
54146 Sep 22 23:24:15.666 DEBG IO Read 1293 has deps [JobId(1291)]
54147 Sep 22 23:24:15.671 INFO [lossy] sleeping 1 second
54148 Sep 22 23:24:15.704 DEBG IO Flush 1294 has deps [JobId(1293), JobId(1292), JobId(1291)]
54149 Sep 22 23:24:16.678 DEBG Read :1293 deps:[JobId(1291)] res:true
54150 Sep 22 23:24:16.701 DEBG Flush :1294 extent_limit None deps:[JobId(1293), JobId(1292), JobId(1291)] res:true f:123 g:1
54151 Sep 22 23:24:16.701 INFO [lossy] sleeping 1 second
54152 Sep 22 23:24:17.078 DEBG [2] Read AckReady 1293, : downstairs
54153 Sep 22 23:24:17.079 DEBG up_ds_listen was notified
54154 Sep 22 23:24:17.079 DEBG up_ds_listen process 1293
54155 Sep 22 23:24:17.079 DEBG [A] ack job 1293:294, : downstairs
54156 Sep 22 23:24:17.131 DEBG up_ds_listen checked 1 jobs, back to waiting
54157 Sep 22 23:24:17.133 DEBG IO Read 1295 has deps [JobId(1294)]
54158 Sep 22 23:24:17.633 DEBG IO Flush 1296 has deps [JobId(1295), JobId(1294)]
54159 Sep 22 23:24:17.709 DEBG Read :1295 deps:[JobId(1294)] res:true
54160 Sep 22 23:24:17.731 WARN returning error on flush!
54161 Sep 22 23:24:17.731 DEBG Flush :1296 extent_limit None deps:[JobId(1295), JobId(1294)] res:false f:124 g:1
54162 Sep 22 23:24:17.731 INFO [lossy] skipping 1296
54163 Sep 22 23:24:17.731 DEBG Flush :1296 extent_limit None deps:[JobId(1295), JobId(1294)] res:true f:124 g:1
54164 Sep 22 23:24:18.107 DEBG [2] Read AckReady 1295, : downstairs
54165 Sep 22 23:24:18.108 ERRO [2] job id 1296 saw error GenericError("test error")
54166 Sep 22 23:24:18.108 DEBG up_ds_listen was notified
54167 Sep 22 23:24:18.108 DEBG up_ds_listen process 1295
54168 Sep 22 23:24:18.109 DEBG [A] ack job 1295:296, : downstairs
54169 Sep 22 23:24:18.161 DEBG up_ds_listen checked 1 jobs, back to waiting
54170 Sep 22 23:24:18.162 DEBG IO Read 1297 has deps [JobId(1296)]
54171 Sep 22 23:24:18.166 INFO [lossy] sleeping 1 second
54172 Sep 22 23:24:18.664 DEBG IO Flush 1298 has deps [JobId(1297), JobId(1296)]
54173 Sep 22 23:24:19.174 DEBG Read :1297 deps:[JobId(1296)] res:true
54174 Sep 22 23:24:19.197 DEBG Flush :1298 extent_limit None deps:[JobId(1297), JobId(1296)] res:true f:125 g:1
54175 Sep 22 23:24:19.574 DEBG [2] Read AckReady 1297, : downstairs
54176 Sep 22 23:24:19.575 DEBG up_ds_listen was notified
54177 Sep 22 23:24:19.575 DEBG up_ds_listen process 1297
54178 Sep 22 23:24:19.575 DEBG [A] ack job 1297:298, : downstairs
54179 Sep 22 23:24:19.628 DEBG up_ds_listen checked 1 jobs, back to waiting
54180 Sep 22 23:24:19.630 DEBG IO Read 1299 has deps [JobId(1298)]
54181 Sep 22 23:24:19.640 DEBG Read :1299 deps:[JobId(1298)] res:true
54182 Sep 22 23:24:20.039 DEBG [2] Read AckReady 1299, : downstairs
54183 Sep 22 23:24:20.040 DEBG up_ds_listen was notified
54184 Sep 22 23:24:20.040 DEBG up_ds_listen process 1299
54185 Sep 22 23:24:20.040 DEBG [A] ack job 1299:300, : downstairs
54186 Sep 22 23:24:20.093 DEBG up_ds_listen checked 1 jobs, back to waiting
54187 Sep 22 23:24:20.094 DEBG IO Flush 1300 has deps [JobId(1299), JobId(1298)]
54188 Sep 22 23:24:20.094 DEBG IO Read 1301 has deps [JobId(1300)]
54189 Sep 22 23:24:20.096 INFO [lossy] skipping 1300
54190 Sep 22 23:24:20.096 INFO [lossy] skipping 1300
54191 Sep 22 23:24:20.097 DEBG Flush :1300 extent_limit None deps:[JobId(1299), JobId(1298)] res:true f:126 g:1
54192 Sep 22 23:24:20.105 DEBG Read :1301 deps:[JobId(1300)] res:true
54193 Sep 22 23:24:20.503 DEBG [2] Read AckReady 1301, : downstairs
54194 Sep 22 23:24:20.504 DEBG up_ds_listen was notified
54195 Sep 22 23:24:20.504 DEBG up_ds_listen process 1301
54196 Sep 22 23:24:20.504 DEBG [A] ack job 1301:302, : downstairs
54197 Sep 22 23:24:20.557 DEBG up_ds_listen checked 1 jobs, back to waiting
54198 Sep 22 23:24:20.558 DEBG IO Read 1302 has deps [JobId(1300)]
54199 Sep 22 23:24:20.568 DEBG Read :1302 deps:[JobId(1300)] res:true
54200 Sep 22 23:24:20.967 DEBG [2] Read AckReady 1302, : downstairs
54201 Sep 22 23:24:20.968 DEBG up_ds_listen was notified
54202 Sep 22 23:24:20.968 DEBG up_ds_listen process 1302
54203 Sep 22 23:24:20.968 DEBG [A] ack job 1302:303, : downstairs
54204 Sep 22 23:24:21.021 DEBG up_ds_listen checked 1 jobs, back to waiting
54205 Sep 22 23:24:21.022 DEBG IO Flush 1303 has deps [JobId(1302), JobId(1301), JobId(1300)]
54206 Sep 22 23:24:21.022 DEBG IO Read 1304 has deps [JobId(1303)]
54207 Sep 22 23:24:21.024 INFO [lossy] skipping 1303
54208 Sep 22 23:24:21.024 DEBG Flush :1303 extent_limit None deps:[JobId(1302), JobId(1301), JobId(1300)] res:true f:127 g:1
54209 Sep 22 23:24:21.027 WARN returning error on read!
54210 Sep 22 23:24:21.027 DEBG Read :1304 deps:[JobId(1303)] res:false
54211 Sep 22 23:24:21.033 DEBG Read :1304 deps:[JobId(1303)] res:true
54212 Sep 22 23:24:21.054 ERRO [2] job id 1304 saw error GenericError("test error")
54213 Sep 22 23:24:21.433 DEBG [2] Read AckReady 1304, : downstairs
54214 Sep 22 23:24:21.434 DEBG up_ds_listen was notified
54215 Sep 22 23:24:21.434 DEBG up_ds_listen process 1304
54216 Sep 22 23:24:21.435 DEBG [A] ack job 1304:305, : downstairs
54217 Sep 22 23:24:21.487 DEBG up_ds_listen checked 1 jobs, back to waiting
54218 Sep 22 23:24:21.488 DEBG IO Read 1305 has deps [JobId(1303)]
54219 Sep 22 23:24:21.498 DEBG Read :1305 deps:[JobId(1303)] res:true
54220 Sep 22 23:24:21.897 DEBG [2] Read AckReady 1305, : downstairs
54221 Sep 22 23:24:21.898 DEBG up_ds_listen was notified
54222 Sep 22 23:24:21.898 DEBG up_ds_listen process 1305
54223 Sep 22 23:24:21.898 DEBG [A] ack job 1305:306, : downstairs
54224 Sep 22 23:24:21.951 DEBG up_ds_listen checked 1 jobs, back to waiting
54225 Sep 22 23:24:21.952 DEBG IO Flush 1306 has deps [JobId(1305), JobId(1304), JobId(1303)]
54226 Sep 22 23:24:21.952 DEBG IO Read 1307 has deps [JobId(1306)]
54227 Sep 22 23:24:21.954 INFO [lossy] sleeping 1 second
54228 Sep 22 23:24:22.453 DEBG IO Flush 1308 has deps [JobId(1307), JobId(1306)]
54229 Sep 22 23:24:22.955 WARN returning error on flush!
54230 Sep 22 23:24:22.955 DEBG Flush :1306 extent_limit None deps:[JobId(1305), JobId(1304), JobId(1303)] res:false f:128 g:1
54231 Sep 22 23:24:22.955 WARN returning error on flush!
54232 Sep 22 23:24:22.955 DEBG Flush :1306 extent_limit None deps:[JobId(1305), JobId(1304), JobId(1303)] res:false f:128 g:1
54233 Sep 22 23:24:22.955 DEBG Flush :1306 extent_limit None deps:[JobId(1305), JobId(1304), JobId(1303)] res:true f:128 g:1
54234 Sep 22 23:24:22.955 INFO [lossy] skipping 1307
54235 Sep 22 23:24:22.955 WARN 1308 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54236 Sep 22 23:24:22.961 DEBG Read :1307 deps:[JobId(1306)] res:true
54237 Sep 22 23:24:22.983 ERRO [2] job id 1306 saw error GenericError("test error")
54238 Sep 22 23:24:22.983 ERRO [2] job id 1306 saw error GenericError("test error")
54239 Sep 22 23:24:22.985 WARN returning error on flush!
54240 Sep 22 23:24:22.985 DEBG Flush :1308 extent_limit None deps:[JobId(1307), JobId(1306)] res:false f:129 g:1
54241 Sep 22 23:24:22.985 DEBG Flush :1308 extent_limit None deps:[JobId(1307), JobId(1306)] res:true f:129 g:1
54242 Sep 22 23:24:23.362 DEBG [2] Read AckReady 1307, : downstairs
54243 Sep 22 23:24:23.363 ERRO [2] job id 1308 saw error GenericError("test error")
54244 Sep 22 23:24:23.363 DEBG up_ds_listen was notified
54245 Sep 22 23:24:23.363 DEBG up_ds_listen process 1307
54246 Sep 22 23:24:23.363 DEBG [A] ack job 1307:308, : downstairs
54247 Sep 22 23:24:23.416 DEBG up_ds_listen checked 1 jobs, back to waiting
54248 Sep 22 23:24:23.417 DEBG IO Read 1309 has deps [JobId(1308)]
54249 Sep 22 23:24:23.421 WARN returning error on read!
54250 Sep 22 23:24:23.421 DEBG Read :1309 deps:[JobId(1308)] res:false
54251 Sep 22 23:24:23.421 WARN returning error on read!
54252 Sep 22 23:24:23.421 DEBG Read :1309 deps:[JobId(1308)] res:false
54253 Sep 22 23:24:23.421 INFO [lossy] skipping 1309
54254 Sep 22 23:24:23.427 DEBG Read :1309 deps:[JobId(1308)] res:true
54255 Sep 22 23:24:23.448 ERRO [2] job id 1309 saw error GenericError("test error")
54256 Sep 22 23:24:23.449 ERRO [2] job id 1309 saw error GenericError("test error")
54257 Sep 22 23:24:23.825 DEBG [2] Read AckReady 1309, : downstairs
54258 Sep 22 23:24:23.827 DEBG up_ds_listen was notified
54259 Sep 22 23:24:23.827 DEBG up_ds_listen process 1309
54260 Sep 22 23:24:23.827 DEBG [A] ack job 1309:310, : downstairs
54261 Sep 22 23:24:23.879 DEBG up_ds_listen checked 1 jobs, back to waiting
54262 Sep 22 23:24:23.880 DEBG IO Flush 1310 has deps [JobId(1309), JobId(1308)]
54263 Sep 22 23:24:23.881 DEBG IO Read 1311 has deps [JobId(1310)]
54264 Sep 22 23:24:23.883 INFO [lossy] sleeping 1 second
54265 Sep 22 23:24:24.381 DEBG IO Flush 1312 has deps [JobId(1311), JobId(1310)]
54266 Sep 22 23:24:24.884 DEBG Flush :1310 extent_limit None deps:[JobId(1309), JobId(1308)] res:true f:130 g:1
54267 Sep 22 23:24:24.891 DEBG Read :1311 deps:[JobId(1310)] res:true
54268 Sep 22 23:24:24.914 DEBG Flush :1312 extent_limit None deps:[JobId(1311), JobId(1310)] res:true f:131 g:1
54269 Sep 22 23:24:25.291 DEBG [2] Read AckReady 1311, : downstairs
54270 Sep 22 23:24:25.292 DEBG up_ds_listen was notified
54271 Sep 22 23:24:25.292 DEBG up_ds_listen process 1311
54272 Sep 22 23:24:25.292 DEBG [A] ack job 1311:312, : downstairs
54273 Sep 22 23:24:25.345 DEBG up_ds_listen checked 1 jobs, back to waiting
54274 Sep 22 23:24:25.347 DEBG IO Read 1313 has deps [JobId(1312)]
54275 Sep 22 23:24:25.351 WARN returning error on read!
54276 Sep 22 23:24:25.351 DEBG Read :1313 deps:[JobId(1312)] res:false
54277 Sep 22 23:24:25.357 DEBG Read :1313 deps:[JobId(1312)] res:true
54278 Sep 22 23:24:25.379 ERRO [2] job id 1313 saw error GenericError("test error")
54279 Sep 22 23:24:25.759 DEBG [2] Read AckReady 1313, : downstairs
54280 Sep 22 23:24:25.760 DEBG up_ds_listen was notified
54281 Sep 22 23:24:25.760 DEBG up_ds_listen process 1313
54282 Sep 22 23:24:25.760 DEBG [A] ack job 1313:314, : downstairs
54283 Sep 22 23:24:25.812 DEBG up_ds_listen checked 1 jobs, back to waiting
54284 Sep 22 23:24:25.813 DEBG IO Flush 1314 has deps [JobId(1313), JobId(1312)]
54285 Sep 22 23:24:25.814 DEBG IO Read 1315 has deps [JobId(1314)]
54286 Sep 22 23:24:25.816 DEBG Flush :1314 extent_limit None deps:[JobId(1313), JobId(1312)] res:true f:132 g:1
54287 Sep 22 23:24:25.818 INFO [lossy] sleeping 1 second
54288 Sep 22 23:24:26.315 DEBG IO Flush 1316 has deps [JobId(1315), JobId(1314)]
54289 Sep 22 23:24:26.825 DEBG Read :1315 deps:[JobId(1314)] res:true
54290 Sep 22 23:24:26.848 WARN returning error on flush!
54291 Sep 22 23:24:26.848 DEBG Flush :1316 extent_limit None deps:[JobId(1315), JobId(1314)] res:false f:133 g:1
54292 Sep 22 23:24:26.848 INFO [lossy] skipping 1316
54293 Sep 22 23:24:26.848 DEBG Flush :1316 extent_limit None deps:[JobId(1315), JobId(1314)] res:true f:133 g:1
54294 Sep 22 23:24:27.225 DEBG [2] Read AckReady 1315, : downstairs
54295 Sep 22 23:24:27.226 ERRO [2] job id 1316 saw error GenericError("test error")
54296 Sep 22 23:24:27.226 DEBG up_ds_listen was notified
54297 Sep 22 23:24:27.226 DEBG up_ds_listen process 1315
54298 Sep 22 23:24:27.226 DEBG [A] ack job 1315:316, : downstairs
54299 Sep 22 23:24:27.279 DEBG up_ds_listen checked 1 jobs, back to waiting
54300 Sep 22 23:24:27.280 DEBG IO Read 1317 has deps [JobId(1316)]
54301 Sep 22 23:24:27.290 DEBG Read :1317 deps:[JobId(1316)] res:true
54302 Sep 22 23:24:27.689 DEBG [2] Read AckReady 1317, : downstairs
54303 Sep 22 23:24:27.691 DEBG up_ds_listen was notified
54304 Sep 22 23:24:27.691 DEBG up_ds_listen process 1317
54305 Sep 22 23:24:27.691 DEBG [A] ack job 1317:318, : downstairs
54306 Sep 22 23:24:27.743 DEBG up_ds_listen checked 1 jobs, back to waiting
54307 Sep 22 23:24:27.744 DEBG IO Flush 1318 has deps [JobId(1317), JobId(1316)]
54308 Sep 22 23:24:27.745 DEBG IO Read 1319 has deps [JobId(1318)]
54309 Sep 22 23:24:27.747 WARN returning error on flush!
54310 Sep 22 23:24:27.747 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:false f:134 g:1
54311 Sep 22 23:24:27.747 WARN returning error on flush!
54312 Sep 22 23:24:27.747 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:false f:134 g:1
54313 Sep 22 23:24:27.747 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:true f:134 g:1
54314 Sep 22 23:24:27.749 ERRO [2] job id 1318 saw error GenericError("test error")
54315 Sep 22 23:24:27.749 ERRO [2] job id 1318 saw error GenericError("test error")
54316 Sep 22 23:24:27.749 WARN returning error on read!
54317 Sep 22 23:24:27.749 DEBG Read :1319 deps:[JobId(1318)] res:false
54318 Sep 22 23:24:27.750 WARN returning error on read!
54319 Sep 22 23:24:27.750 DEBG Read :1319 deps:[JobId(1318)] res:false
54320 Sep 22 23:24:27.755 DEBG Read :1319 deps:[JobId(1318)] res:true
54321 Sep 22 23:24:27.777 ERRO [2] job id 1319 saw error GenericError("test error")
54322 Sep 22 23:24:27.777 ERRO [2] job id 1319 saw error GenericError("test error")
54323 Sep 22 23:24:28.155 DEBG [2] Read AckReady 1319, : downstairs
54324 Sep 22 23:24:28.156 DEBG up_ds_listen was notified
54325 Sep 22 23:24:28.156 DEBG up_ds_listen process 1319
54326 Sep 22 23:24:28.156 DEBG [A] ack job 1319:320, : downstairs
54327 Sep 22 23:24:28.209 DEBG up_ds_listen checked 1 jobs, back to waiting
54328 Sep 22 23:24:28.210 DEBG IO Read 1320 has deps [JobId(1318)]
54329 Sep 22 23:24:28.214 WARN returning error on read!
54330 Sep 22 23:24:28.214 DEBG Read :1320 deps:[JobId(1318)] res:false
54331 Sep 22 23:24:28.220 DEBG Read :1320 deps:[JobId(1318)] res:true
54332 Sep 22 23:24:28.241 ERRO [2] job id 1320 saw error GenericError("test error")
54333 Sep 22 23:24:28.619 DEBG [2] Read AckReady 1320, : downstairs
54334 Sep 22 23:24:28.620 DEBG up_ds_listen was notified
54335 Sep 22 23:24:28.620 DEBG up_ds_listen process 1320
54336 Sep 22 23:24:28.620 DEBG [A] ack job 1320:321, : downstairs
54337 Sep 22 23:24:28.673 DEBG up_ds_listen checked 1 jobs, back to waiting
54338 Sep 22 23:24:28.674 DEBG IO Flush 1321 has deps [JobId(1320), JobId(1319), JobId(1318)]
54339 Sep 22 23:24:28.674 DEBG IO Read 1322 has deps [JobId(1321)]
54340 Sep 22 23:24:28.676 INFO [lossy] skipping 1321
54341 Sep 22 23:24:28.676 DEBG Flush :1321 extent_limit None deps:[JobId(1320), JobId(1319), JobId(1318)] res:true f:135 g:1
54342 Sep 22 23:24:28.679 WARN returning error on read!
54343 Sep 22 23:24:28.679 DEBG Read :1322 deps:[JobId(1321)] res:false
54344 Sep 22 23:24:28.685 DEBG Read :1322 deps:[JobId(1321)] res:true
54345 Sep 22 23:24:28.706 ERRO [2] job id 1322 saw error GenericError("test error")
54346 Sep 22 23:24:29.084 DEBG [2] Read AckReady 1322, : downstairs
54347 Sep 22 23:24:29.085 DEBG up_ds_listen was notified
54348 Sep 22 23:24:29.085 DEBG up_ds_listen process 1322
54349 Sep 22 23:24:29.085 DEBG [A] ack job 1322:323, : downstairs
54350 Sep 22 23:24:29.138 DEBG up_ds_listen checked 1 jobs, back to waiting
54351 Sep 22 23:24:31.253 DEBG IO Write 1323 has deps [JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1235), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1163), JobId(1162)]
54352 Sep 22 23:24:31.253 DEBG IO Flush 1324 has deps [JobId(1323), JobId(1322), JobId(1321)]
54353 Sep 22 23:24:31.253 DEBG up_ds_listen was notified
54354 Sep 22 23:24:31.253 DEBG up_ds_listen process 1323
54355 Sep 22 23:24:31.254 DEBG [A] ack job 1323:324, : downstairs
54356 Sep 22 23:24:31.254 DEBG up_ds_listen checked 1 jobs, back to waiting
54357 Sep 22 23:24:31.604 DEBG IO Write 1325 has deps [JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1237), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1165), JobId(1164), JobId(1162)]
54358 Sep 22 23:24:31.604 DEBG up_ds_listen was notified
54359 Sep 22 23:24:31.604 DEBG up_ds_listen process 1325
54360 Sep 22 23:24:31.605 DEBG [A] ack job 1325:326, : downstairs
54361 Sep 22 23:24:31.605 DEBG up_ds_listen checked 1 jobs, back to waiting
54362 Sep 22 23:24:31.605 INFO [lossy] sleeping 1 second
54363 Sep 22 23:24:31.935 DEBG IO Write 1326 has deps [JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1239), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1166), JobId(1164), JobId(1162)]
54364 Sep 22 23:24:31.935 DEBG up_ds_listen was notified
54365 Sep 22 23:24:31.935 DEBG up_ds_listen process 1326
54366 Sep 22 23:24:31.935 DEBG [A] ack job 1326:327, : downstairs
54367 Sep 22 23:24:31.935 DEBG up_ds_listen checked 1 jobs, back to waiting
54368 Sep 22 23:24:31.935 DEBG IO Flush 1327 has deps [JobId(1326), JobId(1325), JobId(1324)]
54369 Sep 22 23:24:32.265 DEBG IO Write 1328 has deps [JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1241), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1168), JobId(1167), JobId(1164), JobId(1162)]
54370 Sep 22 23:24:32.265 DEBG up_ds_listen was notified
54371 Sep 22 23:24:32.265 DEBG up_ds_listen process 1328
54372 Sep 22 23:24:32.265 DEBG [A] ack job 1328:329, : downstairs
54373 Sep 22 23:24:32.265 DEBG up_ds_listen checked 1 jobs, back to waiting
54374 Sep 22 23:24:32.595 DEBG IO Write 1329 has deps [JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1243), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1169), JobId(1167), JobId(1164), JobId(1162)]
54375 Sep 22 23:24:32.595 DEBG up_ds_listen was notified
54376 Sep 22 23:24:32.595 DEBG up_ds_listen process 1329
54377 Sep 22 23:24:32.595 DEBG [A] ack job 1329:330, : downstairs
54378 Sep 22 23:24:32.595 DEBG up_ds_listen checked 1 jobs, back to waiting
54379 Sep 22 23:24:32.596 DEBG IO Flush 1330 has deps [JobId(1329), JobId(1328), JobId(1327)]
54380 Sep 22 23:24:32.925 DEBG IO Write 1331 has deps [JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1245), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1171), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54381 Sep 22 23:24:32.925 DEBG up_ds_listen was notified
54382 Sep 22 23:24:32.926 DEBG up_ds_listen process 1331
54383 Sep 22 23:24:32.926 DEBG [A] ack job 1331:332, : downstairs
54384 Sep 22 23:24:32.926 DEBG up_ds_listen checked 1 jobs, back to waiting
54385 Sep 22 23:24:33.255 DEBG IO Write 1332 has deps [JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1247), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1172), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54386 Sep 22 23:24:33.255 DEBG up_ds_listen was notified
54387 Sep 22 23:24:33.255 DEBG up_ds_listen process 1332
54388 Sep 22 23:24:33.256 DEBG [A] ack job 1332:333, : downstairs
54389 Sep 22 23:24:33.256 DEBG up_ds_listen checked 1 jobs, back to waiting
54390 Sep 22 23:24:33.256 DEBG IO Flush 1333 has deps [JobId(1332), JobId(1331), JobId(1330)]
54391 Sep 22 23:24:33.586 DEBG IO Write 1334 has deps [JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1249), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1174), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54392 Sep 22 23:24:33.586 DEBG up_ds_listen was notified
54393 Sep 22 23:24:33.586 DEBG up_ds_listen process 1334
54394 Sep 22 23:24:33.586 DEBG [A] ack job 1334:335, : downstairs
54395 Sep 22 23:24:33.586 DEBG up_ds_listen checked 1 jobs, back to waiting
54396 Sep 22 23:24:33.916 DEBG IO Write 1335 has deps [JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1251), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1175), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54397 Sep 22 23:24:33.916 DEBG up_ds_listen was notified
54398 Sep 22 23:24:33.916 DEBG up_ds_listen process 1335
54399 Sep 22 23:24:33.916 DEBG [A] ack job 1335:336, : downstairs
54400 Sep 22 23:24:33.916 DEBG up_ds_listen checked 1 jobs, back to waiting
54401 Sep 22 23:24:33.917 DEBG IO Flush 1336 has deps [JobId(1335), JobId(1334), JobId(1333)]
54402 Sep 22 23:24:34.267 DEBG IO Write 1337 has deps [JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1253), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1177), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54403 Sep 22 23:24:34.267 DEBG up_ds_listen was notified
54404 Sep 22 23:24:34.267 DEBG up_ds_listen process 1337
54405 Sep 22 23:24:34.267 DEBG [A] ack job 1337:338, : downstairs
54406 Sep 22 23:24:34.267 DEBG up_ds_listen checked 1 jobs, back to waiting
54407 Sep 22 23:24:34.314 INFO [lossy] skipping 1323
54408 Sep 22 23:24:34.314 INFO [lossy] skipping 1324
54409 Sep 22 23:24:34.315 WARN returning error on write!
54410 Sep 22 23:24:34.315 DEBG Write :1323 deps:[JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1235), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1163), JobId(1162)] res:false
54411 Sep 22 23:24:34.316 WARN returning error on write!
54412 Sep 22 23:24:34.316 DEBG Write :1323 deps:[JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1235), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1163), JobId(1162)] res:false
54413 Sep 22 23:24:34.317 WARN returning error on write!
54414 Sep 22 23:24:34.317 DEBG Write :1323 deps:[JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1235), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1163), JobId(1162)] res:false
54415 Sep 22 23:24:34.347 DEBG Write :1323 deps:[JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1235), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1163), JobId(1162)] res:true
54416 Sep 22 23:24:34.352 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322), JobId(1321)] res:true f:136 g:1
54417 Sep 22 23:24:34.681 DEBG IO Write 1338 has deps [JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1255), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1179), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54418 Sep 22 23:24:34.681 DEBG up_ds_listen was notified
54419 Sep 22 23:24:34.681 DEBG up_ds_listen process 1338
54420 Sep 22 23:24:34.681 DEBG [A] ack job 1338:339, : downstairs
54421 Sep 22 23:24:34.681 DEBG up_ds_listen checked 1 jobs, back to waiting
54422 Sep 22 23:24:34.682 DEBG IO Flush 1339 has deps [JobId(1338), JobId(1337), JobId(1336)]
54423 Sep 22 23:24:35.011 DEBG IO Write 1340 has deps [JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1257), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1180), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54424 Sep 22 23:24:35.011 DEBG up_ds_listen was notified
54425 Sep 22 23:24:35.011 DEBG up_ds_listen process 1340
54426 Sep 22 23:24:35.011 DEBG [A] ack job 1340:341, : downstairs
54427 Sep 22 23:24:35.011 DEBG up_ds_listen checked 1 jobs, back to waiting
54428 Sep 22 23:24:35.341 DEBG IO Write 1341 has deps [JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1259), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1182), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54429 Sep 22 23:24:35.341 DEBG up_ds_listen was notified
54430 Sep 22 23:24:35.341 DEBG up_ds_listen process 1341
54431 Sep 22 23:24:35.341 DEBG [A] ack job 1341:342, : downstairs
54432 Sep 22 23:24:35.341 DEBG up_ds_listen checked 1 jobs, back to waiting
54433 Sep 22 23:24:35.342 DEBG IO Flush 1342 has deps [JobId(1341), JobId(1340), JobId(1339)]
54434 Sep 22 23:24:35.671 DEBG IO Write 1343 has deps [JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1261), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1183), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54435 Sep 22 23:24:35.671 DEBG up_ds_listen was notified
54436 Sep 22 23:24:35.671 DEBG up_ds_listen process 1343
54437 Sep 22 23:24:35.672 DEBG [A] ack job 1343:344, : downstairs
54438 Sep 22 23:24:35.672 DEBG up_ds_listen checked 1 jobs, back to waiting
54439 Sep 22 23:24:36.002 DEBG IO Write 1344 has deps [JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54440 Sep 22 23:24:36.002 DEBG up_ds_listen was notified
54441 Sep 22 23:24:36.002 DEBG up_ds_listen process 1344
54442 Sep 22 23:24:36.002 DEBG [A] ack job 1344:345, : downstairs
54443 Sep 22 23:24:36.002 DEBG up_ds_listen checked 1 jobs, back to waiting
54444 Sep 22 23:24:36.003 DEBG IO Flush 1345 has deps [JobId(1344), JobId(1343), JobId(1342)]
54445 Sep 22 23:24:36.332 DEBG IO Write 1346 has deps [JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1265), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1186), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54446 Sep 22 23:24:36.332 DEBG up_ds_listen was notified
54447 Sep 22 23:24:36.332 DEBG up_ds_listen process 1346
54448 Sep 22 23:24:36.332 DEBG [A] ack job 1346:347, : downstairs
54449 Sep 22 23:24:36.332 DEBG up_ds_listen checked 1 jobs, back to waiting
54450 Sep 22 23:24:36.662 DEBG IO Write 1347 has deps [JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1267), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1188), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54451 Sep 22 23:24:36.662 DEBG up_ds_listen was notified
54452 Sep 22 23:24:36.662 DEBG up_ds_listen process 1347
54453 Sep 22 23:24:36.662 DEBG [A] ack job 1347:348, : downstairs
54454 Sep 22 23:24:36.662 DEBG up_ds_listen checked 1 jobs, back to waiting
54455 Sep 22 23:24:36.662 DEBG IO Flush 1348 has deps [JobId(1347), JobId(1346), JobId(1345)]
54456 Sep 22 23:24:36.992 DEBG IO Write 1349 has deps [JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1269), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1189), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54457 Sep 22 23:24:36.992 DEBG up_ds_listen was notified
54458 Sep 22 23:24:36.992 DEBG up_ds_listen process 1349
54459 Sep 22 23:24:36.992 DEBG [A] ack job 1349:350, : downstairs
54460 Sep 22 23:24:36.992 DEBG up_ds_listen checked 1 jobs, back to waiting
54461 Sep 22 23:24:37.343 DEBG IO Write 1350 has deps [JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1271), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1191), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54462 Sep 22 23:24:37.343 DEBG up_ds_listen was notified
54463 Sep 22 23:24:37.343 DEBG up_ds_listen process 1350
54464 Sep 22 23:24:37.343 DEBG [A] ack job 1350:351, : downstairs
54465 Sep 22 23:24:37.343 DEBG up_ds_listen checked 1 jobs, back to waiting
54466 Sep 22 23:24:37.390 DEBG IO Flush 1351 has deps [JobId(1350), JobId(1349), JobId(1348)]
54467 Sep 22 23:24:37.390 INFO [lossy] skipping 1325
54468 Sep 22 23:24:37.390 INFO [lossy] skipping 1325
54469 Sep 22 23:24:37.420 DEBG Write :1325 deps:[JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1237), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1165), JobId(1164), JobId(1162)] res:true
54470 Sep 22 23:24:37.750 DEBG IO Write 1352 has deps [JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1273), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1193), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54471 Sep 22 23:24:37.750 DEBG up_ds_listen was notified
54472 Sep 22 23:24:37.750 DEBG up_ds_listen process 1352
54473 Sep 22 23:24:37.750 DEBG [A] ack job 1352:353, : downstairs
54474 Sep 22 23:24:37.750 DEBG up_ds_listen checked 1 jobs, back to waiting
54475 Sep 22 23:24:38.079 DEBG IO Write 1353 has deps [JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1275), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1194), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54476 Sep 22 23:24:38.080 DEBG up_ds_listen was notified
54477 Sep 22 23:24:38.080 DEBG up_ds_listen process 1353
54478 Sep 22 23:24:38.080 DEBG [A] ack job 1353:354, : downstairs
54479 Sep 22 23:24:38.080 DEBG up_ds_listen checked 1 jobs, back to waiting
54480 Sep 22 23:24:38.080 DEBG IO Flush 1354 has deps [JobId(1353), JobId(1352), JobId(1351)]
54481 Sep 22 23:24:38.409 DEBG IO Write 1355 has deps [JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1277), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1196), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54482 Sep 22 23:24:38.410 DEBG up_ds_listen was notified
54483 Sep 22 23:24:38.410 DEBG up_ds_listen process 1355
54484 Sep 22 23:24:38.410 DEBG [A] ack job 1355:356, : downstairs
54485 Sep 22 23:24:38.410 DEBG up_ds_listen checked 1 jobs, back to waiting
54486 Sep 22 23:24:38.740 DEBG IO Write 1356 has deps [JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1279), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1197), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54487 Sep 22 23:24:38.740 DEBG up_ds_listen was notified
54488 Sep 22 23:24:38.740 DEBG up_ds_listen process 1356
54489 Sep 22 23:24:38.740 DEBG [A] ack job 1356:357, : downstairs
54490 Sep 22 23:24:38.740 DEBG up_ds_listen checked 1 jobs, back to waiting
54491 Sep 22 23:24:38.741 DEBG IO Flush 1357 has deps [JobId(1356), JobId(1355), JobId(1354)]
54492 Sep 22 23:24:39.070 DEBG IO Write 1358 has deps [JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1281), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1199), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54493 Sep 22 23:24:39.070 DEBG up_ds_listen was notified
54494 Sep 22 23:24:39.070 DEBG up_ds_listen process 1358
54495 Sep 22 23:24:39.070 DEBG [A] ack job 1358:359, : downstairs
54496 Sep 22 23:24:39.070 DEBG up_ds_listen checked 1 jobs, back to waiting
54497 Sep 22 23:24:39.399 DEBG IO Write 1359 has deps [JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1283), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1200), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54498 Sep 22 23:24:39.400 DEBG up_ds_listen was notified
54499 Sep 22 23:24:39.400 DEBG up_ds_listen process 1359
54500 Sep 22 23:24:39.400 DEBG [A] ack job 1359:360, : downstairs
54501 Sep 22 23:24:39.400 DEBG up_ds_listen checked 1 jobs, back to waiting
54502 Sep 22 23:24:39.400 DEBG IO Flush 1360 has deps [JobId(1359), JobId(1358), JobId(1357)]
54503 Sep 22 23:24:39.729 DEBG IO Write 1361 has deps [JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1285), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1202), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54504 Sep 22 23:24:39.730 DEBG up_ds_listen was notified
54505 Sep 22 23:24:39.730 DEBG up_ds_listen process 1361
54506 Sep 22 23:24:39.730 DEBG [A] ack job 1361:362, : downstairs
54507 Sep 22 23:24:39.730 DEBG up_ds_listen checked 1 jobs, back to waiting
54508 Sep 22 23:24:40.060 DEBG IO Write 1362 has deps [JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1287), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1203), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54509 Sep 22 23:24:40.060 DEBG up_ds_listen was notified
54510 Sep 22 23:24:40.060 DEBG up_ds_listen process 1362
54511 Sep 22 23:24:40.060 DEBG [A] ack job 1362:363, : downstairs
54512 Sep 22 23:24:40.060 DEBG up_ds_listen checked 1 jobs, back to waiting
54513 Sep 22 23:24:40.061 DEBG IO Flush 1363 has deps [JobId(1362), JobId(1361), JobId(1360)]
54514 Sep 22 23:24:40.412 DEBG IO Write 1364 has deps [JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1288), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1205), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54515 Sep 22 23:24:40.413 DEBG up_ds_listen was notified
54516 Sep 22 23:24:40.413 DEBG up_ds_listen process 1364
54517 Sep 22 23:24:40.413 DEBG [A] ack job 1364:365, : downstairs
54518 Sep 22 23:24:40.413 DEBG up_ds_listen checked 1 jobs, back to waiting
54519 Sep 22 23:24:40.490 DEBG Write :1326 deps:[JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1239), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1166), JobId(1164), JobId(1162)] res:true
54520 Sep 22 23:24:40.491 INFO [lossy] skipping 1327
54521 Sep 22 23:24:40.491 INFO [lossy] skipping 1327
54522 Sep 22 23:24:40.491 WARN returning error on flush!
54523 Sep 22 23:24:40.491 DEBG Flush :1327 extent_limit None deps:[JobId(1326), JobId(1325), JobId(1324)] res:false f:137 g:1
54524 Sep 22 23:24:40.500 DEBG Flush :1327 extent_limit None deps:[JobId(1326), JobId(1325), JobId(1324)] res:true f:137 g:1
54525 Sep 22 23:24:40.829 DEBG IO Write 1365 has deps [JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1290), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1207), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54526 Sep 22 23:24:40.829 DEBG up_ds_listen was notified
54527 Sep 22 23:24:40.829 DEBG up_ds_listen process 1365
54528 Sep 22 23:24:40.829 DEBG [A] ack job 1365:366, : downstairs
54529 Sep 22 23:24:40.830 DEBG up_ds_listen checked 1 jobs, back to waiting
54530 Sep 22 23:24:40.830 DEBG IO Flush 1366 has deps [JobId(1365), JobId(1364), JobId(1363)]
54531 Sep 22 23:24:41.159 DEBG IO Write 1367 has deps [JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1292), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1208), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54532 Sep 22 23:24:41.160 DEBG up_ds_listen was notified
54533 Sep 22 23:24:41.160 DEBG up_ds_listen process 1367
54534 Sep 22 23:24:41.160 DEBG [A] ack job 1367:368, : downstairs
54535 Sep 22 23:24:41.160 DEBG up_ds_listen checked 1 jobs, back to waiting
54536 Sep 22 23:24:41.490 DEBG IO Write 1368 has deps [JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1293), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1210), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54537 Sep 22 23:24:41.490 DEBG up_ds_listen was notified
54538 Sep 22 23:24:41.490 DEBG up_ds_listen process 1368
54539 Sep 22 23:24:41.490 DEBG [A] ack job 1368:369, : downstairs
54540 Sep 22 23:24:41.490 DEBG up_ds_listen checked 1 jobs, back to waiting
54541 Sep 22 23:24:41.491 DEBG IO Flush 1369 has deps [JobId(1368), JobId(1367), JobId(1366)]
54542 Sep 22 23:24:41.820 DEBG IO Write 1370 has deps [JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1295), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1211), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54543 Sep 22 23:24:41.820 DEBG up_ds_listen was notified
54544 Sep 22 23:24:41.820 DEBG up_ds_listen process 1370
54545 Sep 22 23:24:41.820 DEBG [A] ack job 1370:371, : downstairs
54546 Sep 22 23:24:41.820 DEBG up_ds_listen checked 1 jobs, back to waiting
54547 Sep 22 23:24:42.150 DEBG IO Write 1371 has deps [JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1297), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1213), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54548 Sep 22 23:24:42.150 DEBG up_ds_listen was notified
54549 Sep 22 23:24:42.150 DEBG up_ds_listen process 1371
54550 Sep 22 23:24:42.151 DEBG [A] ack job 1371:372, : downstairs
54551 Sep 22 23:24:42.151 DEBG up_ds_listen checked 1 jobs, back to waiting
54552 Sep 22 23:24:42.151 DEBG IO Flush 1372 has deps [JobId(1371), JobId(1370), JobId(1369)]
54553 Sep 22 23:24:42.481 DEBG IO Write 1373 has deps [JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1299), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1214), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54554 Sep 22 23:24:42.481 DEBG up_ds_listen was notified
54555 Sep 22 23:24:42.481 DEBG up_ds_listen process 1373
54556 Sep 22 23:24:42.481 DEBG [A] ack job 1373:374, : downstairs
54557 Sep 22 23:24:42.481 DEBG up_ds_listen checked 1 jobs, back to waiting
54558 Sep 22 23:24:42.811 DEBG IO Write 1374 has deps [JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1301), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1216), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54559 Sep 22 23:24:42.811 DEBG up_ds_listen was notified
54560 Sep 22 23:24:42.811 DEBG up_ds_listen process 1374
54561 Sep 22 23:24:42.811 DEBG [A] ack job 1374:375, : downstairs
54562 Sep 22 23:24:42.812 DEBG up_ds_listen checked 1 jobs, back to waiting
54563 Sep 22 23:24:42.812 DEBG IO Flush 1375 has deps [JobId(1374), JobId(1373), JobId(1372)]
54564 Sep 22 23:24:43.141 DEBG IO Write 1376 has deps [JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1302), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1217), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54565 Sep 22 23:24:43.142 DEBG up_ds_listen was notified
54566 Sep 22 23:24:43.142 DEBG up_ds_listen process 1376
54567 Sep 22 23:24:43.142 DEBG [A] ack job 1376:377, : downstairs
54568 Sep 22 23:24:43.142 DEBG up_ds_listen checked 1 jobs, back to waiting
54569 Sep 22 23:24:43.472 DEBG IO Write 1377 has deps [JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1304), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1219), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54570 Sep 22 23:24:43.472 DEBG up_ds_listen was notified
54571 Sep 22 23:24:43.472 DEBG up_ds_listen process 1377
54572 Sep 22 23:24:43.472 DEBG [A] ack job 1377:378, : downstairs
54573 Sep 22 23:24:43.472 DEBG up_ds_listen checked 1 jobs, back to waiting
54574 Sep 22 23:24:43.473 DEBG IO Flush 1378 has deps [JobId(1377), JobId(1376), JobId(1375)]
54575 Sep 22 23:24:43.824 DEBG IO Write 1379 has deps [JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1305), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1220), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54576 Sep 22 23:24:43.824 DEBG up_ds_listen was notified
54577 Sep 22 23:24:43.824 DEBG up_ds_listen process 1379
54578 Sep 22 23:24:43.824 DEBG [A] ack job 1379:380, : downstairs
54579 Sep 22 23:24:43.824 DEBG up_ds_listen checked 1 jobs, back to waiting
54580 Sep 22 23:24:43.901 DEBG Write :1328 deps:[JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1241), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1168), JobId(1167), JobId(1164), JobId(1162)] res:true
54581 Sep 22 23:24:44.231 DEBG IO Write 1380 has deps [JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1307), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1222), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54582 Sep 22 23:24:44.232 DEBG up_ds_listen was notified
54583 Sep 22 23:24:44.232 DEBG up_ds_listen process 1380
54584 Sep 22 23:24:44.232 DEBG [A] ack job 1380:381, : downstairs
54585 Sep 22 23:24:44.232 DEBG up_ds_listen checked 1 jobs, back to waiting
54586 Sep 22 23:24:44.232 DEBG IO Flush 1381 has deps [JobId(1380), JobId(1379), JobId(1378)]
54587 Sep 22 23:24:44.561 DEBG IO Write 1382 has deps [JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1309), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1223), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54588 Sep 22 23:24:44.562 DEBG up_ds_listen was notified
54589 Sep 22 23:24:44.562 DEBG up_ds_listen process 1382
54590 Sep 22 23:24:44.562 DEBG [A] ack job 1382:383, : downstairs
54591 Sep 22 23:24:44.562 DEBG up_ds_listen checked 1 jobs, back to waiting
54592 Sep 22 23:24:44.892 DEBG IO Write 1383 has deps [JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1311), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1225), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54593 Sep 22 23:24:44.892 DEBG up_ds_listen was notified
54594 Sep 22 23:24:44.892 DEBG up_ds_listen process 1383
54595 Sep 22 23:24:44.892 DEBG [A] ack job 1383:384, : downstairs
54596 Sep 22 23:24:44.892 DEBG up_ds_listen checked 1 jobs, back to waiting
54597 Sep 22 23:24:44.893 DEBG IO Flush 1384 has deps [JobId(1383), JobId(1382), JobId(1381)]
54598 Sep 22 23:24:45.223 DEBG IO Write 1385 has deps [JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1313), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1226), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54599 Sep 22 23:24:45.223 DEBG up_ds_listen was notified
54600 Sep 22 23:24:45.223 DEBG up_ds_listen process 1385
54601 Sep 22 23:24:45.223 DEBG [A] ack job 1385:386, : downstairs
54602 Sep 22 23:24:45.223 DEBG up_ds_listen checked 1 jobs, back to waiting
54603 Sep 22 23:24:45.553 DEBG IO Write 1386 has deps [JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1315), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1228), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54604 Sep 22 23:24:45.553 DEBG up_ds_listen was notified
54605 Sep 22 23:24:45.553 DEBG up_ds_listen process 1386
54606 Sep 22 23:24:45.553 DEBG [A] ack job 1386:387, : downstairs
54607 Sep 22 23:24:45.553 DEBG up_ds_listen checked 1 jobs, back to waiting
54608 Sep 22 23:24:45.554 DEBG IO Flush 1387 has deps [JobId(1386), JobId(1385), JobId(1384)]
54609 Sep 22 23:24:45.883 DEBG IO Write 1388 has deps [JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1317), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1229), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54610 Sep 22 23:24:45.883 DEBG up_ds_listen was notified
54611 Sep 22 23:24:45.883 DEBG up_ds_listen process 1388
54612 Sep 22 23:24:45.883 DEBG [A] ack job 1388:389, : downstairs
54613 Sep 22 23:24:45.884 DEBG up_ds_listen checked 1 jobs, back to waiting
54614 Sep 22 23:24:46.214 DEBG IO Write 1389 has deps [JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1319), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1231), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54615 Sep 22 23:24:46.214 DEBG up_ds_listen was notified
54616 Sep 22 23:24:46.214 DEBG up_ds_listen process 1389
54617 Sep 22 23:24:46.214 DEBG [A] ack job 1389:390, : downstairs
54618 Sep 22 23:24:46.214 DEBG up_ds_listen checked 1 jobs, back to waiting
54619 Sep 22 23:24:46.215 DEBG IO Flush 1390 has deps [JobId(1389), JobId(1388), JobId(1387)]
54620 Sep 22 23:24:46.544 DEBG IO Write 1391 has deps [JobId(1390), JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1320), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1232), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)]
54621 Sep 22 23:24:46.545 DEBG up_ds_listen was notified
54622 Sep 22 23:24:46.545 DEBG up_ds_listen process 1391
54623 Sep 22 23:24:46.545 DEBG [A] ack job 1391:392, : downstairs
54624 Sep 22 23:24:46.545 DEBG up_ds_listen checked 1 jobs, back to waiting
54625 Sep 22 23:24:46.895 DEBG IO Write 1392 has deps [JobId(1390), JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1322), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1234), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)]
54626 Sep 22 23:24:46.896 DEBG up_ds_listen was notified
54627 Sep 22 23:24:46.896 DEBG up_ds_listen process 1392
54628 Sep 22 23:24:46.896 DEBG [A] ack job 1392:393, : downstairs
54629 Sep 22 23:24:46.896 DEBG up_ds_listen checked 1 jobs, back to waiting
54630 Sep 22 23:24:46.943 DEBG IO Flush 1393 has deps [JobId(1392), JobId(1391), JobId(1390)]
54631 Sep 22 23:24:46.943 INFO [lossy] sleeping 1 second
54632 Sep 22 23:24:46.944 DEBG IO Read 1394 has deps [JobId(1393)]
54633 Sep 22 23:24:47.172 ERRO [2] job id 1323 saw error GenericError("test error")
54634 Sep 22 23:24:47.172 ERRO [2] job id 1323 saw error GenericError("test error")
54635 Sep 22 23:24:47.172 ERRO [2] job id 1323 saw error GenericError("test error")
54636 Sep 22 23:24:47.172 ERRO [2] job id 1327 saw error GenericError("test error")
54637 Sep 22 23:24:47.494 DEBG IO Flush 1395 has deps [JobId(1394), JobId(1393)]
54638 Sep 22 23:24:48.005 DEBG Write :1329 deps:[JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1243), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1169), JobId(1167), JobId(1164), JobId(1162)] res:true
54639 Sep 22 23:24:48.015 DEBG Flush :1330 extent_limit None deps:[JobId(1329), JobId(1328), JobId(1327)] res:true f:138 g:1
54640 Sep 22 23:24:48.015 INFO [lossy] skipping 1331
54641 Sep 22 23:24:48.044 DEBG Write :1332 deps:[JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1247), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1172), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54642 Sep 22 23:24:48.045 INFO [lossy] skipping 1335
54643 Sep 22 23:24:48.045 INFO [lossy] skipping 1346
54644 Sep 22 23:24:48.045 INFO [lossy] skipping 1331
54645 Sep 22 23:24:48.074 DEBG Write :1331 deps:[JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1245), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1171), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54646 Sep 22 23:24:48.083 DEBG Flush :1333 extent_limit None deps:[JobId(1332), JobId(1331), JobId(1330)] res:true f:139 g:1
54647 Sep 22 23:24:48.083 INFO [lossy] skipping 1334
54648 Sep 22 23:24:48.084 WARN returning error on write!
54649 Sep 22 23:24:48.084 DEBG Write :1335 deps:[JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1251), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1175), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54650 Sep 22 23:24:48.084 INFO [lossy] skipping 1336
54651 Sep 22 23:24:48.084 INFO [lossy] skipping 1337
54652 Sep 22 23:24:48.084 WARN 1338 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54653 Sep 22 23:24:48.084 INFO [lossy] skipping 1340
54654 Sep 22 23:24:48.084 WARN 1341 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54655 Sep 22 23:24:48.084 INFO [lossy] skipping 1343
54656 Sep 22 23:24:48.084 WARN 1344 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54657 Sep 22 23:24:48.084 WARN 1346 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54658 Sep 22 23:24:48.084 INFO [lossy] skipping 1347
54659 Sep 22 23:24:48.084 WARN 1349 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54660 Sep 22 23:24:48.084 WARN 1350 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54661 Sep 22 23:24:48.084 WARN 1352 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54662 Sep 22 23:24:48.086 WARN returning error on write!
54663 Sep 22 23:24:48.086 DEBG Write :1334 deps:[JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1249), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1174), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54664 Sep 22 23:24:48.087 WARN returning error on write!
54665 Sep 22 23:24:48.087 DEBG Write :1335 deps:[JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1251), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1175), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54666 Sep 22 23:24:48.087 WARN 1336 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54667 Sep 22 23:24:48.087 WARN 1337 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54668 Sep 22 23:24:48.087 INFO [lossy] skipping 1340
54669 Sep 22 23:24:48.087 WARN 1343 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54670 Sep 22 23:24:48.087 WARN 1347 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54671 Sep 22 23:24:48.088 WARN returning error on write!
54672 Sep 22 23:24:48.088 DEBG Write :1334 deps:[JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1249), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1174), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54673 Sep 22 23:24:48.117 DEBG Write :1335 deps:[JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1251), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1175), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54674 Sep 22 23:24:48.119 WARN 1340 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54675 Sep 22 23:24:48.148 DEBG Write :1334 deps:[JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1249), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1174), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54676 Sep 22 23:24:48.157 DEBG Flush :1336 extent_limit None deps:[JobId(1335), JobId(1334), JobId(1333)] res:true f:140 g:1
54677 Sep 22 23:24:48.186 DEBG Write :1337 deps:[JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1253), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1177), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54678 Sep 22 23:24:48.217 DEBG Write :1338 deps:[JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1255), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1179), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54679 Sep 22 23:24:48.226 DEBG Flush :1339 extent_limit None deps:[JobId(1338), JobId(1337), JobId(1336)] res:true f:141 g:1
54680 Sep 22 23:24:48.255 DEBG Write :1340 deps:[JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1257), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1180), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54681 Sep 22 23:24:48.257 WARN returning error on write!
54682 Sep 22 23:24:48.258 DEBG Write :1341 deps:[JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1259), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1182), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54683 Sep 22 23:24:48.258 WARN 1342 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54684 Sep 22 23:24:48.258 WARN 1343 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54685 Sep 22 23:24:48.258 WARN 1344 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54686 Sep 22 23:24:48.258 WARN 1346 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54687 Sep 22 23:24:48.258 WARN 1347 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54688 Sep 22 23:24:48.258 INFO [lossy] skipping 1348
54689 Sep 22 23:24:48.258 WARN 1349 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54690 Sep 22 23:24:48.258 WARN 1350 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54691 Sep 22 23:24:48.258 INFO [lossy] skipping 1352
54692 Sep 22 23:24:48.258 INFO [lossy] skipping 1341
54693 Sep 22 23:24:48.258 WARN 1352 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54694 Sep 22 23:24:48.287 DEBG Write :1341 deps:[JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1259), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1182), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54695 Sep 22 23:24:48.296 DEBG Flush :1342 extent_limit None deps:[JobId(1341), JobId(1340), JobId(1339)] res:true f:142 g:1
54696 Sep 22 23:24:48.326 DEBG Write :1343 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1261), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1183), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54697 Sep 22 23:24:48.327 INFO [lossy] skipping 1344
54698 Sep 22 23:24:48.327 WARN 1345 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54699 Sep 22 23:24:48.327 WARN 1346 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54700 Sep 22 23:24:48.327 INFO [lossy] skipping 1347
54701 Sep 22 23:24:48.327 INFO [lossy] skipping 1349
54702 Sep 22 23:24:48.327 WARN 1350 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54703 Sep 22 23:24:48.327 INFO [lossy] skipping 1351
54704 Sep 22 23:24:48.327 WARN 1352 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54705 Sep 22 23:24:48.328 WARN returning error on write!
54706 Sep 22 23:24:48.328 DEBG Write :1344 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54707 Sep 22 23:24:48.328 INFO [lossy] skipping 1347
54708 Sep 22 23:24:48.328 WARN 1349 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54709 Sep 22 23:24:48.329 WARN returning error on write!
54710 Sep 22 23:24:48.329 DEBG Write :1344 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54711 Sep 22 23:24:48.329 INFO [lossy] skipping 1347
54712 Sep 22 23:24:48.330 WARN returning error on write!
54713 Sep 22 23:24:48.330 DEBG Write :1344 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54714 Sep 22 23:24:48.330 WARN 1347 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54715 Sep 22 23:24:48.330 INFO [lossy] skipping 1344
54716 Sep 22 23:24:48.331 WARN returning error on write!
54717 Sep 22 23:24:48.331 DEBG Write :1344 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54718 Sep 22 23:24:48.331 INFO [lossy] skipping 1344
54719 Sep 22 23:24:48.331 INFO [lossy] skipping 1344
54720 Sep 22 23:24:48.332 WARN returning error on write!
54721 Sep 22 23:24:48.332 DEBG Write :1344 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54722 Sep 22 23:24:48.361 DEBG Write :1344 deps:[JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1263), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1185), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54723 Sep 22 23:24:48.362 INFO [lossy] skipping 1345
54724 Sep 22 23:24:48.362 INFO [lossy] skipping 1348
54725 Sep 22 23:24:48.362 INFO [lossy] skipping 1352
54726 Sep 22 23:24:48.362 WARN returning error on flush!
54727 Sep 22 23:24:48.362 DEBG Flush :1345 extent_limit None deps:[JobId(1344), JobId(1343), JobId(1342)] res:false f:143 g:1
54728 Sep 22 23:24:48.362 INFO [lossy] skipping 1352
54729 Sep 22 23:24:48.370 DEBG Flush :1345 extent_limit None deps:[JobId(1344), JobId(1343), JobId(1342)] res:true f:143 g:1
54730 Sep 22 23:24:48.370 INFO [lossy] skipping 1352
54731 Sep 22 23:24:48.370 WARN 1352 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54732 Sep 22 23:24:48.370 INFO [lossy] skipping 1346
54733 Sep 22 23:24:48.400 DEBG Write :1347 deps:[JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1267), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1188), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54734 Sep 22 23:24:48.401 WARN 1348 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54735 Sep 22 23:24:48.401 WARN 1349 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54736 Sep 22 23:24:48.401 WARN 1350 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54737 Sep 22 23:24:48.401 INFO [lossy] skipping 1352
54738 Sep 22 23:24:48.402 WARN returning error on write!
54739 Sep 22 23:24:48.402 DEBG Write :1346 deps:[JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1265), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1186), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54740 Sep 22 23:24:48.431 DEBG Write :1346 deps:[JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1265), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1186), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54741 Sep 22 23:24:48.441 DEBG Flush :1348 extent_limit None deps:[JobId(1347), JobId(1346), JobId(1345)] res:true f:144 g:1
54742 Sep 22 23:24:48.441 INFO [lossy] skipping 1349
54743 Sep 22 23:24:48.471 DEBG Write :1350 deps:[JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1271), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1191), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54744 Sep 22 23:24:48.472 INFO [lossy] skipping 1351
54745 Sep 22 23:24:48.472 WARN 1352 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54746 Sep 22 23:24:48.502 DEBG Write :1349 deps:[JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1269), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1189), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54747 Sep 22 23:24:48.512 DEBG Flush :1351 extent_limit None deps:[JobId(1350), JobId(1349), JobId(1348)] res:true f:145 g:1
54748 Sep 22 23:24:48.512 INFO [lossy] sleeping 1 second
54749 Sep 22 23:24:49.546 WARN returning error on write!
54750 Sep 22 23:24:49.546 DEBG Write :1352 deps:[JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1273), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1193), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54751 Sep 22 23:24:49.546 INFO [lossy] skipping 1353
54752 Sep 22 23:24:49.546 INFO [lossy] skipping 1354
54753 Sep 22 23:24:49.546 INFO [lossy] skipping 1356
54754 Sep 22 23:24:49.546 INFO [lossy] skipping 1360
54755 Sep 22 23:24:49.546 INFO [lossy] skipping 1364
54756 Sep 22 23:24:49.546 INFO [lossy] skipping 1366
54757 Sep 22 23:24:49.546 INFO [lossy] skipping 1368
54758 Sep 22 23:24:49.546 INFO [lossy] skipping 1374
54759 Sep 22 23:24:49.546 INFO [lossy] skipping 1375
54760 Sep 22 23:24:49.547 WARN returning error on write!
54761 Sep 22 23:24:49.547 DEBG Write :1352 deps:[JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1273), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1193), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54762 Sep 22 23:24:49.578 DEBG Write :1353 deps:[JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1275), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1194), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54763 Sep 22 23:24:49.579 INFO [lossy] skipping 1366
54764 Sep 22 23:24:49.579 INFO [lossy] skipping 1374
54765 Sep 22 23:24:49.579 WARN returning error on write!
54766 Sep 22 23:24:49.579 DEBG Write :1352 deps:[JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1273), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1193), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54767 Sep 22 23:24:49.609 DEBG Write :1352 deps:[JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1273), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1193), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54768 Sep 22 23:24:49.619 DEBG Flush :1354 extent_limit None deps:[JobId(1353), JobId(1352), JobId(1351)] res:true f:146 g:1
54769 Sep 22 23:24:49.619 INFO [lossy] skipping 1355
54770 Sep 22 23:24:49.619 INFO [lossy] skipping 1356
54771 Sep 22 23:24:49.619 INFO [lossy] skipping 1357
54772 Sep 22 23:24:49.619 WARN 1358 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54773 Sep 22 23:24:49.619 INFO [lossy] skipping 1359
54774 Sep 22 23:24:49.619 WARN 1361 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54775 Sep 22 23:24:49.619 WARN 1362 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54776 Sep 22 23:24:49.619 INFO [lossy] skipping 1364
54777 Sep 22 23:24:49.619 WARN 1365 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54778 Sep 22 23:24:49.619 WARN 1367 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54779 Sep 22 23:24:49.619 WARN 1368 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54780 Sep 22 23:24:49.619 WARN 1370 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54781 Sep 22 23:24:49.619 WARN 1371 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54782 Sep 22 23:24:49.619 INFO [lossy] skipping 1372
54783 Sep 22 23:24:49.619 WARN 1373 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54784 Sep 22 23:24:49.619 INFO [lossy] skipping 1374
54785 Sep 22 23:24:49.619 INFO [lossy] skipping 1355
54786 Sep 22 23:24:49.619 INFO [lossy] skipping 1356
54787 Sep 22 23:24:49.619 WARN 1357 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54788 Sep 22 23:24:49.619 WARN 1359 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54789 Sep 22 23:24:49.619 WARN 1364 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54790 Sep 22 23:24:49.619 INFO [lossy] skipping 1372
54791 Sep 22 23:24:49.619 WARN 1374 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54792 Sep 22 23:24:49.649 DEBG Write :1355 deps:[JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1277), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1196), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54793 Sep 22 23:24:49.680 DEBG Write :1356 deps:[JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1279), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1197), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54794 Sep 22 23:24:49.681 INFO [lossy] sleeping 1 second
54795 Sep 22 23:24:50.484 ERRO [2] job id 1335 saw error GenericError("test error")
54796 Sep 22 23:24:50.484 ERRO [2] job id 1334 saw error GenericError("test error")
54797 Sep 22 23:24:50.484 ERRO [2] job id 1335 saw error GenericError("test error")
54798 Sep 22 23:24:50.484 ERRO [2] job id 1334 saw error GenericError("test error")
54799 Sep 22 23:24:50.484 ERRO [2] job id 1341 saw error GenericError("test error")
54800 Sep 22 23:24:50.484 ERRO [2] job id 1344 saw error GenericError("test error")
54801 Sep 22 23:24:50.484 ERRO [2] job id 1344 saw error GenericError("test error")
54802 Sep 22 23:24:50.484 ERRO [2] job id 1344 saw error GenericError("test error")
54803 Sep 22 23:24:50.484 ERRO [2] job id 1344 saw error GenericError("test error")
54804 Sep 22 23:24:50.484 ERRO [2] job id 1344 saw error GenericError("test error")
54805 Sep 22 23:24:50.485 ERRO [2] job id 1345 saw error GenericError("test error")
54806 Sep 22 23:24:50.485 ERRO [2] job id 1346 saw error GenericError("test error")
54807 Sep 22 23:24:50.485 ERRO [2] job id 1352 saw error GenericError("test error")
54808 Sep 22 23:24:50.485 ERRO [2] job id 1352 saw error GenericError("test error")
54809 Sep 22 23:24:50.485 ERRO [2] job id 1352 saw error GenericError("test error")
54810 Sep 22 23:24:50.691 DEBG Flush :1357 extent_limit None deps:[JobId(1356), JobId(1355), JobId(1354)] res:true f:147 g:1
54811 Sep 22 23:24:50.721 DEBG Write :1358 deps:[JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1281), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1199), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54812 Sep 22 23:24:50.752 DEBG Write :1359 deps:[JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1283), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1200), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54813 Sep 22 23:24:50.761 DEBG Flush :1360 extent_limit None deps:[JobId(1359), JobId(1358), JobId(1357)] res:true f:148 g:1
54814 Sep 22 23:24:50.791 DEBG Write :1361 deps:[JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1285), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1202), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54815 Sep 22 23:24:50.793 WARN returning error on write!
54816 Sep 22 23:24:50.793 DEBG Write :1362 deps:[JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1287), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1203), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54817 Sep 22 23:24:50.793 INFO [lossy] skipping 1363
54818 Sep 22 23:24:50.793 WARN 1364 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54819 Sep 22 23:24:50.793 INFO [lossy] skipping 1365
54820 Sep 22 23:24:50.793 INFO [lossy] skipping 1366
54821 Sep 22 23:24:50.793 INFO [lossy] skipping 1367
54822 Sep 22 23:24:50.793 WARN 1368 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54823 Sep 22 23:24:50.793 WARN 1370 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54824 Sep 22 23:24:50.793 INFO [lossy] skipping 1371
54825 Sep 22 23:24:50.793 WARN 1373 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54826 Sep 22 23:24:50.794 WARN 1374 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54827 Sep 22 23:24:50.794 INFO [lossy] skipping 1375
54828 Sep 22 23:24:50.794 INFO [lossy] skipping 1376
54829 Sep 22 23:24:50.794 INFO [lossy] skipping 1377
54830 Sep 22 23:24:50.794 INFO [lossy] skipping 1380
54831 Sep 22 23:24:50.794 INFO [lossy] skipping 1383
54832 Sep 22 23:24:50.794 INFO [lossy] skipping 1392
54833 Sep 22 23:24:50.823 DEBG Write :1362 deps:[JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1287), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1203), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54834 Sep 22 23:24:50.832 DEBG Flush :1363 extent_limit None deps:[JobId(1362), JobId(1361), JobId(1360)] res:true f:149 g:1
54835 Sep 22 23:24:50.862 DEBG Write :1365 deps:[JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1290), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1207), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54836 Sep 22 23:24:50.863 WARN 1366 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54837 Sep 22 23:24:50.863 WARN 1367 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54838 Sep 22 23:24:50.863 WARN 1371 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54839 Sep 22 23:24:50.863 INFO [lossy] skipping 1375
54840 Sep 22 23:24:50.863 INFO [lossy] skipping 1376
54841 Sep 22 23:24:50.863 INFO [lossy] skipping 1377
54842 Sep 22 23:24:50.863 INFO [lossy] skipping 1380
54843 Sep 22 23:24:50.863 INFO [lossy] skipping 1375
54844 Sep 22 23:24:50.863 INFO [lossy] skipping 1376
54845 Sep 22 23:24:50.863 INFO [lossy] sleeping 1 second
54846 Sep 22 23:24:50.864 ERRO [2] job id 1362 saw error GenericError("test error")
54847 Sep 22 23:24:51.866 WARN returning error on write!
54848 Sep 22 23:24:51.866 DEBG Write :1364 deps:[JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1288), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1205), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54849 Sep 22 23:24:51.866 INFO [lossy] skipping 1366
54850 Sep 22 23:24:51.866 WARN 1368 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54851 Sep 22 23:24:51.866 INFO [lossy] skipping 1369
54852 Sep 22 23:24:51.866 WARN 1370 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54853 Sep 22 23:24:51.866 WARN 1373 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54854 Sep 22 23:24:51.866 WARN 1374 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54855 Sep 22 23:24:51.866 INFO [lossy] skipping 1376
54856 Sep 22 23:24:51.866 INFO [lossy] skipping 1377
54857 Sep 22 23:24:51.866 INFO [lossy] skipping 1378
54858 Sep 22 23:24:51.866 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54859 Sep 22 23:24:51.866 WARN 1382 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54860 Sep 22 23:24:51.866 INFO [lossy] skipping 1385
54861 Sep 22 23:24:51.866 INFO [lossy] skipping 1386
54862 Sep 22 23:24:51.866 INFO [lossy] skipping 1388
54863 Sep 22 23:24:51.866 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
54864 Sep 22 23:24:51.866 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 9 deps, role: work
54865 Sep 22 23:24:51.866 INFO [lossy] skipping 1394
54866 Sep 22 23:24:51.866 INFO [lossy] skipping 1395
54867 Sep 22 23:24:51.896 DEBG Write :1364 deps:[JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1288), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1205), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54868 Sep 22 23:24:51.905 DEBG Flush :1366 extent_limit None deps:[JobId(1365), JobId(1364), JobId(1363)] res:true f:150 g:1
54869 Sep 22 23:24:51.905 WARN 1369 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54870 Sep 22 23:24:51.905 WARN 1376 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54871 Sep 22 23:24:51.905 WARN 1377 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54872 Sep 22 23:24:51.905 INFO [lossy] skipping 1378
54873 Sep 22 23:24:51.905 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54874 Sep 22 23:24:51.905 WARN 1386 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54875 Sep 22 23:24:51.906 WARN 1388 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
54876 Sep 22 23:24:51.935 DEBG Write :1367 deps:[JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1292), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1208), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54877 Sep 22 23:24:51.938 WARN returning error on write!
54878 Sep 22 23:24:51.938 DEBG Write :1368 deps:[JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1293), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1210), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54879 Sep 22 23:24:51.938 INFO [lossy] skipping 1369
54880 Sep 22 23:24:51.938 WARN 1370 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54881 Sep 22 23:24:51.938 WARN 1371 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54882 Sep 22 23:24:51.938 WARN 1373 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54883 Sep 22 23:24:51.938 WARN 1374 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54884 Sep 22 23:24:51.938 INFO [lossy] skipping 1376
54885 Sep 22 23:24:51.938 INFO [lossy] skipping 1379
54886 Sep 22 23:24:51.938 WARN 1380 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54887 Sep 22 23:24:51.938 WARN 1382 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54888 Sep 22 23:24:51.938 WARN 1383 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54889 Sep 22 23:24:51.938 INFO [lossy] skipping 1384
54890 Sep 22 23:24:51.938 INFO [lossy] skipping 1386
54891 Sep 22 23:24:51.938 INFO [lossy] skipping 1387
54892 Sep 22 23:24:51.938 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 7 deps, role: work
54893 Sep 22 23:24:51.938 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
54894 Sep 22 23:24:51.938 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 8 deps, role: work
54895 Sep 22 23:24:51.938 INFO [lossy] skipping 1368
54896 Sep 22 23:24:51.938 WARN 1369 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54897 Sep 22 23:24:51.938 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54898 Sep 22 23:24:51.938 INFO [lossy] skipping 1387
54899 Sep 22 23:24:51.938 INFO [lossy] skipping 1368
54900 Sep 22 23:24:51.968 DEBG Write :1368 deps:[JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1293), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1210), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54901 Sep 22 23:24:51.969 INFO [lossy] sleeping 1 second
54902 Sep 22 23:24:51.969 ERRO [2] job id 1364 saw error GenericError("test error")
54903 Sep 22 23:24:51.969 ERRO [2] job id 1368 saw error GenericError("test error")
54904 Sep 22 23:24:52.979 DEBG Flush :1369 extent_limit None deps:[JobId(1368), JobId(1367), JobId(1366)] res:true f:151 g:1
54905 Sep 22 23:24:53.009 DEBG Write :1370 deps:[JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1295), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1211), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54906 Sep 22 23:24:53.041 DEBG Write :1371 deps:[JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1297), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1213), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54907 Sep 22 23:24:53.050 DEBG Flush :1372 extent_limit None deps:[JobId(1371), JobId(1370), JobId(1369)] res:true f:152 g:1
54908 Sep 22 23:24:53.050 INFO [lossy] skipping 1373
54909 Sep 22 23:24:53.051 WARN returning error on write!
54910 Sep 22 23:24:53.051 DEBG Write :1374 deps:[JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1301), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1216), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54911 Sep 22 23:24:53.052 INFO [lossy] skipping 1375
54912 Sep 22 23:24:53.052 WARN 1376 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54913 Sep 22 23:24:53.052 WARN 1377 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54914 Sep 22 23:24:53.052 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54915 Sep 22 23:24:53.052 INFO [lossy] skipping 1380
54916 Sep 22 23:24:53.052 WARN 1382 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54917 Sep 22 23:24:53.052 WARN 1383 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54918 Sep 22 23:24:53.052 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54919 Sep 22 23:24:53.052 INFO [lossy] skipping 1386
54920 Sep 22 23:24:53.052 INFO [lossy] skipping 1388
54921 Sep 22 23:24:53.052 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54922 Sep 22 23:24:53.052 INFO [lossy] skipping 1390
54923 Sep 22 23:24:53.052 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54924 Sep 22 23:24:53.052 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 6 deps, role: work
54925 Sep 22 23:24:53.052 INFO [lossy] skipping 1393
54926 Sep 22 23:24:53.052 INFO [lossy] skipping 1373
54927 Sep 22 23:24:53.082 DEBG Write :1374 deps:[JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1301), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1216), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54928 Sep 22 23:24:53.083 WARN 1375 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54929 Sep 22 23:24:53.083 WARN 1380 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54930 Sep 22 23:24:53.083 INFO [lossy] skipping 1386
54931 Sep 22 23:24:53.083 WARN 1388 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54932 Sep 22 23:24:53.113 DEBG Write :1373 deps:[JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1299), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1214), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54933 Sep 22 23:24:53.114 WARN 1386 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54934 Sep 22 23:24:53.114 INFO [lossy] sleeping 1 second
54935 Sep 22 23:24:53.115 ERRO [2] job id 1374 saw error GenericError("test error")
54936 Sep 22 23:24:54.115 WARN returning error on flush!
54937 Sep 22 23:24:54.115 DEBG Flush :1375 extent_limit None deps:[JobId(1374), JobId(1373), JobId(1372)] res:false f:153 g:1
54938 Sep 22 23:24:54.115 INFO [lossy] skipping 1381
54939 Sep 22 23:24:54.115 INFO [lossy] skipping 1384
54940 Sep 22 23:24:54.115 INFO [lossy] skipping 1385
54941 Sep 22 23:24:54.115 INFO [lossy] skipping 1386
54942 Sep 22 23:24:54.115 INFO [lossy] skipping 1388
54943 Sep 22 23:24:54.124 DEBG Flush :1375 extent_limit None deps:[JobId(1374), JobId(1373), JobId(1372)] res:true f:153 g:1
54944 Sep 22 23:24:54.124 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54945 Sep 22 23:24:54.124 WARN 1386 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54946 Sep 22 23:24:54.124 WARN 1388 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54947 Sep 22 23:24:54.125 WARN returning error on write!
54948 Sep 22 23:24:54.125 DEBG Write :1376 deps:[JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1302), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1217), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54949 Sep 22 23:24:54.156 DEBG Write :1377 deps:[JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1304), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1219), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54950 Sep 22 23:24:54.157 INFO [lossy] skipping 1378
54951 Sep 22 23:24:54.157 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54952 Sep 22 23:24:54.158 INFO [lossy] skipping 1380
54953 Sep 22 23:24:54.158 WARN 1382 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54954 Sep 22 23:24:54.158 WARN 1383 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54955 Sep 22 23:24:54.158 INFO [lossy] skipping 1386
54956 Sep 22 23:24:54.158 INFO [lossy] skipping 1387
54957 Sep 22 23:24:54.158 INFO [lossy] skipping 1389
54958 Sep 22 23:24:54.158 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54959 Sep 22 23:24:54.158 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 5 deps, role: work
54960 Sep 22 23:24:54.158 WARN returning error on write!
54961 Sep 22 23:24:54.158 DEBG Write :1376 deps:[JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1302), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1217), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54962 Sep 22 23:24:54.159 WARN 1378 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54963 Sep 22 23:24:54.159 INFO [lossy] skipping 1380
54964 Sep 22 23:24:54.159 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54965 Sep 22 23:24:54.188 DEBG Write :1376 deps:[JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1302), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1217), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54966 Sep 22 23:24:54.189 WARN 1380 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54967 Sep 22 23:24:54.197 DEBG Flush :1378 extent_limit None deps:[JobId(1377), JobId(1376), JobId(1375)] res:true f:154 g:1
54968 Sep 22 23:24:54.198 WARN returning error on write!
54969 Sep 22 23:24:54.198 DEBG Write :1379 deps:[JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1305), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1220), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54970 Sep 22 23:24:54.200 WARN returning error on write!
54971 Sep 22 23:24:54.200 DEBG Write :1380 deps:[JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1307), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1222), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
54972 Sep 22 23:24:54.200 WARN 1381 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54973 Sep 22 23:24:54.200 INFO [lossy] skipping 1382
54974 Sep 22 23:24:54.200 INFO [lossy] skipping 1383
54975 Sep 22 23:24:54.200 INFO [lossy] skipping 1384
54976 Sep 22 23:24:54.200 INFO [lossy] skipping 1385
54977 Sep 22 23:24:54.200 WARN 1386 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54978 Sep 22 23:24:54.200 WARN 1388 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54979 Sep 22 23:24:54.200 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
54980 Sep 22 23:24:54.200 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54981 Sep 22 23:24:54.200 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 4 deps, role: work
54982 Sep 22 23:24:54.200 INFO [lossy] skipping 1394
54983 Sep 22 23:24:54.230 DEBG Write :1379 deps:[JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1305), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1220), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54984 Sep 22 23:24:54.261 DEBG Write :1380 deps:[JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1307), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1222), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54985 Sep 22 23:24:54.262 WARN 1382 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54986 Sep 22 23:24:54.262 WARN 1383 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54987 Sep 22 23:24:54.262 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
54988 Sep 22 23:24:54.262 INFO [lossy] skipping 1381
54989 Sep 22 23:24:54.262 INFO [lossy] skipping 1383
54990 Sep 22 23:24:54.262 INFO [lossy] skipping 1386
54991 Sep 22 23:24:54.262 INFO [lossy] skipping 1387
54992 Sep 22 23:24:54.262 INFO [lossy] skipping 1394
54993 Sep 22 23:24:54.262 INFO [lossy] skipping 1395
54994 Sep 22 23:24:54.270 DEBG Flush :1381 extent_limit None deps:[JobId(1380), JobId(1379), JobId(1378)] res:true f:155 g:1
54995 Sep 22 23:24:54.300 DEBG Write :1383 deps:[JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1311), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1225), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54996 Sep 22 23:24:54.301 WARN 1386 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
54997 Sep 22 23:24:54.331 DEBG Write :1382 deps:[JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1309), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1223), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
54998 Sep 22 23:24:54.332 INFO [lossy] skipping 1384
54999 Sep 22 23:24:54.332 INFO [lossy] skipping 1385
55000 Sep 22 23:24:54.332 INFO [lossy] skipping 1387
55001 Sep 22 23:24:54.332 WARN 1388 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
55002 Sep 22 23:24:54.332 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
55003 Sep 22 23:24:54.332 INFO [lossy] skipping 1391
55004 Sep 22 23:24:54.332 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
55005 Sep 22 23:24:54.332 INFO [lossy] skipping 1384
55006 Sep 22 23:24:54.332 INFO [lossy] skipping 1385
55007 Sep 22 23:24:54.332 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 3 deps, role: work
55008 Sep 22 23:24:54.332 WARN returning error on flush!
55009 Sep 22 23:24:54.332 DEBG Flush :1384 extent_limit None deps:[JobId(1383), JobId(1382), JobId(1381)] res:false f:156 g:1
55010 Sep 22 23:24:54.332 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55011 Sep 22 23:24:54.332 WARN returning error on flush!
55012 Sep 22 23:24:54.332 DEBG Flush :1384 extent_limit None deps:[JobId(1383), JobId(1382), JobId(1381)] res:false f:156 g:1
55013 Sep 22 23:24:54.340 DEBG Flush :1384 extent_limit None deps:[JobId(1383), JobId(1382), JobId(1381)] res:true f:156 g:1
55014 Sep 22 23:24:54.340 INFO [lossy] sleeping 1 second
55015 Sep 22 23:24:54.341 ERRO [2] job id 1375 saw error GenericError("test error")
55016 Sep 22 23:24:54.341 ERRO [2] job id 1376 saw error GenericError("test error")
55017 Sep 22 23:24:54.341 ERRO [2] job id 1376 saw error GenericError("test error")
55018 Sep 22 23:24:54.341 ERRO [2] job id 1379 saw error GenericError("test error")
55019 Sep 22 23:24:54.341 ERRO [2] job id 1380 saw error GenericError("test error")
55020 Sep 22 23:24:54.341 ERRO [2] job id 1384 saw error GenericError("test error")
55021 Sep 22 23:24:54.341 ERRO [2] job id 1384 saw error GenericError("test error")
55022 Sep 22 23:24:55.372 DEBG Write :1385 deps:[JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1313), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1227), JobId(1226), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
55023 Sep 22 23:24:55.403 DEBG Write :1386 deps:[JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1316), JobId(1315), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1228), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
55024 Sep 22 23:24:55.404 WARN returning error on flush!
55025 Sep 22 23:24:55.404 DEBG Flush :1387 extent_limit None deps:[JobId(1386), JobId(1385), JobId(1384)] res:false f:157 g:1
55026 Sep 22 23:24:55.404 WARN 1388 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55027 Sep 22 23:24:55.404 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55028 Sep 22 23:24:55.404 INFO [lossy] skipping 1390
55029 Sep 22 23:24:55.404 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
55030 Sep 22 23:24:55.404 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
55031 Sep 22 23:24:55.404 INFO [lossy] skipping 1393
55032 Sep 22 23:24:55.404 INFO [lossy] skipping 1395
55033 Sep 22 23:24:55.412 DEBG Flush :1387 extent_limit None deps:[JobId(1386), JobId(1385), JobId(1384)] res:true f:157 g:1
55034 Sep 22 23:24:55.412 INFO [lossy] skipping 1390
55035 Sep 22 23:24:55.412 INFO [lossy] skipping 1395
55036 Sep 22 23:24:55.412 WARN 1390 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 2 deps, role: work
55037 Sep 22 23:24:55.413 WARN returning error on write!
55038 Sep 22 23:24:55.413 DEBG Write :1388 deps:[JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1317), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1229), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:false
55039 Sep 22 23:24:55.443 DEBG Write :1389 deps:[JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1319), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1231), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
55040 Sep 22 23:24:55.445 WARN 1390 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55041 Sep 22 23:24:55.445 WARN 1391 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55042 Sep 22 23:24:55.445 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55043 Sep 22 23:24:55.474 DEBG Write :1388 deps:[JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1318), JobId(1317), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1230), JobId(1229), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
55044 Sep 22 23:24:55.475 INFO [lossy] sleeping 1 second
55045 Sep 22 23:24:55.475 ERRO [2] job id 1387 saw error GenericError("test error")
55046 Sep 22 23:24:55.475 ERRO [2] job id 1388 saw error GenericError("test error")
55047 Sep 22 23:24:56.476 WARN returning error on flush!
55048 Sep 22 23:24:56.476 DEBG Flush :1390 extent_limit None deps:[JobId(1389), JobId(1388), JobId(1387)] res:false f:158 g:1
55049 Sep 22 23:24:56.476 INFO [lossy] skipping 1392
55050 Sep 22 23:24:56.476 INFO [lossy] skipping 1394
55051 Sep 22 23:24:56.476 INFO [lossy] skipping 1390
55052 Sep 22 23:24:56.476 INFO [lossy] skipping 1392
55053 Sep 22 23:24:56.485 DEBG Flush :1390 extent_limit None deps:[JobId(1389), JobId(1388), JobId(1387)] res:true f:158 g:1
55054 Sep 22 23:24:56.486 WARN returning error on write!
55055 Sep 22 23:24:56.486 DEBG Write :1392 deps:[JobId(1390), JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1322), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1234), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)] res:false
55056 Sep 22 23:24:56.487 INFO [lossy] skipping 1392
55057 Sep 22 23:24:56.487 INFO [lossy] skipping 1392
55058 Sep 22 23:24:56.487 WARN returning error on write!
55059 Sep 22 23:24:56.487 DEBG Write :1392 deps:[JobId(1390), JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1322), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1234), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)] res:false
55060 Sep 22 23:24:56.488 INFO [lossy] skipping 1392
55061 Sep 22 23:24:56.517 DEBG Write :1392 deps:[JobId(1390), JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1322), JobId(1321), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1234), JobId(1233), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162), JobId(1161)] res:true
55062 Sep 22 23:24:56.548 DEBG Write :1391 deps:[JobId(1390), JobId(1387), JobId(1384), JobId(1381), JobId(1378), JobId(1375), JobId(1372), JobId(1369), JobId(1366), JobId(1363), JobId(1360), JobId(1357), JobId(1354), JobId(1351), JobId(1348), JobId(1345), JobId(1342), JobId(1339), JobId(1336), JobId(1333), JobId(1330), JobId(1327), JobId(1324), JobId(1321), JobId(1320), JobId(1318), JobId(1316), JobId(1314), JobId(1312), JobId(1310), JobId(1308), JobId(1306), JobId(1303), JobId(1300), JobId(1298), JobId(1296), JobId(1294), JobId(1291), JobId(1289), JobId(1286), JobId(1284), JobId(1282), JobId(1280), JobId(1278), JobId(1276), JobId(1274), JobId(1272), JobId(1270), JobId(1268), JobId(1266), JobId(1264), JobId(1262), JobId(1260), JobId(1258), JobId(1256), JobId(1254), JobId(1252), JobId(1250), JobId(1248), JobId(1246), JobId(1244), JobId(1242), JobId(1240), JobId(1238), JobId(1236), JobId(1233), JobId(1232), JobId(1230), JobId(1227), JobId(1224), JobId(1221), JobId(1218), JobId(1215), JobId(1212), JobId(1209), JobId(1206), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1190), JobId(1187), JobId(1184), JobId(1181), JobId(1178), JobId(1176), JobId(1173), JobId(1170), JobId(1167), JobId(1164), JobId(1162)] res:true
55063 Sep 22 23:24:56.549 INFO [lossy] skipping 1393
55064 Sep 22 23:24:56.549 INFO [lossy] skipping 1394
55065 Sep 22 23:24:56.549 INFO [lossy] skipping 1395
55066 Sep 22 23:24:56.558 DEBG Flush :1393 extent_limit None deps:[JobId(1392), JobId(1391), JobId(1390)] res:true f:159 g:1
55067 Sep 22 23:24:56.564 DEBG Read :1394 deps:[JobId(1393)] res:true
55068 Sep 22 23:24:56.585 ERRO [2] job id 1390 saw error GenericError("test error")
55069 Sep 22 23:24:56.585 ERRO [2] job id 1392 saw error GenericError("test error")
55070 Sep 22 23:24:56.585 ERRO [2] job id 1392 saw error GenericError("test error")
55071 Sep 22 23:24:56.587 INFO [lossy] skipping 1395
55072 Sep 22 23:24:56.587 DEBG Flush :1395 extent_limit None deps:[JobId(1394), JobId(1393)] res:true f:160 g:1
55073 Sep 22 23:24:56.587 INFO [lossy] sleeping 1 second
55074 Sep 22 23:24:56.964 DEBG [2] Read AckReady 1394, : downstairs
55075 Sep 22 23:24:56.965 DEBG up_ds_listen was notified
55076 Sep 22 23:24:56.965 DEBG up_ds_listen process 1394
55077 Sep 22 23:24:56.965 DEBG [A] ack job 1394:395, : downstairs
55078 Sep 22 23:24:57.018 DEBG up_ds_listen checked 1 jobs, back to waiting
55079 Sep 22 23:24:57.019 DEBG IO Read 1396 has deps [JobId(1395)]
55080 Sep 22 23:24:57.086 DEBG IO Flush 1397 has deps [JobId(1396), JobId(1395)]
55081 Sep 22 23:24:57.588 INFO [lossy] skipping 1396
55082 Sep 22 23:24:57.588 WARN returning error on read!
55083 Sep 22 23:24:57.588 DEBG Read :1396 deps:[JobId(1395)] res:false
55084 Sep 22 23:24:57.588 INFO [lossy] skipping 1396
55085 Sep 22 23:24:57.588 INFO [lossy] skipping 1396
55086 Sep 22 23:24:57.588 WARN returning error on read!
55087 Sep 22 23:24:57.588 DEBG Read :1396 deps:[JobId(1395)] res:false
55088 Sep 22 23:24:57.595 DEBG Read :1396 deps:[JobId(1395)] res:true
55089 Sep 22 23:24:57.616 ERRO [2] job id 1396 saw error GenericError("test error")
55090 Sep 22 23:24:57.616 ERRO [2] job id 1396 saw error GenericError("test error")
55091 Sep 22 23:24:57.618 INFO [lossy] sleeping 1 second
55092 Sep 22 23:24:57.996 DEBG [2] Read AckReady 1396, : downstairs
55093 Sep 22 23:24:57.997 DEBG up_ds_listen was notified
55094 Sep 22 23:24:57.997 DEBG up_ds_listen process 1396
55095 Sep 22 23:24:57.997 DEBG [A] ack job 1396:397, : downstairs
55096 Sep 22 23:24:58.050 DEBG up_ds_listen checked 1 jobs, back to waiting
55097 Sep 22 23:24:58.051 DEBG IO Read 1398 has deps [JobId(1397)]
55098 Sep 22 23:24:58.088 DEBG IO Flush 1399 has deps [JobId(1398), JobId(1397)]
55099 Sep 22 23:24:58.620 DEBG Flush :1397 extent_limit None deps:[JobId(1396), JobId(1395)] res:true f:161 g:1
55100 Sep 22 23:24:58.620 WARN returning error on read!
55101 Sep 22 23:24:58.620 DEBG Read :1398 deps:[JobId(1397)] res:false
55102 Sep 22 23:24:58.620 INFO [lossy] skipping 1398
55103 Sep 22 23:24:58.626 DEBG Read :1398 deps:[JobId(1397)] res:true
55104 Sep 22 23:24:58.647 ERRO [2] job id 1398 saw error GenericError("test error")
55105 Sep 22 23:24:58.649 DEBG Flush :1399 extent_limit None deps:[JobId(1398), JobId(1397)] res:true f:162 g:1
55106 Sep 22 23:24:58.649 INFO [lossy] sleeping 1 second
55107 Sep 22 23:24:59.026 DEBG [2] Read AckReady 1398, : downstairs
55108 Sep 22 23:24:59.027 DEBG up_ds_listen was notified
55109 Sep 22 23:24:59.027 DEBG up_ds_listen process 1398
55110 Sep 22 23:24:59.027 DEBG [A] ack job 1398:399, : downstairs
55111 Sep 22 23:24:59.080 DEBG up_ds_listen checked 1 jobs, back to waiting
55112 Sep 22 23:24:59.081 DEBG IO Read 1400 has deps [JobId(1399)]
55113 Sep 22 23:24:59.090 DEBG IO Flush 1401 has deps [JobId(1400), JobId(1399)]
55114 Sep 22 23:24:59.657 DEBG Read :1400 deps:[JobId(1399)] res:true
55115 Sep 22 23:24:59.680 DEBG Flush :1401 extent_limit None deps:[JobId(1400), JobId(1399)] res:true f:163 g:1
55116 Sep 22 23:24:59.680 INFO [lossy] sleeping 1 second
55117 Sep 22 23:25:00.056 DEBG [2] Read AckReady 1400, : downstairs
55118 Sep 22 23:25:00.057 DEBG up_ds_listen was notified
55119 Sep 22 23:25:00.057 DEBG up_ds_listen process 1400
55120 Sep 22 23:25:00.057 DEBG [A] ack job 1400:401, : downstairs
55121 Sep 22 23:25:00.110 DEBG up_ds_listen checked 1 jobs, back to waiting
55122 Sep 22 23:25:00.111 DEBG IO Read 1402 has deps [JobId(1401)]
55123 Sep 22 23:25:00.612 DEBG IO Flush 1403 has deps [JobId(1402), JobId(1401)]
55124 Sep 22 23:25:00.688 DEBG Read :1402 deps:[JobId(1401)] res:true
55125 Sep 22 23:25:00.711 WARN returning error on flush!
55126 Sep 22 23:25:00.711 DEBG Flush :1403 extent_limit None deps:[JobId(1402), JobId(1401)] res:false f:164 g:1
55127 Sep 22 23:25:00.711 INFO [lossy] skipping 1403
55128 Sep 22 23:25:00.711 INFO [lossy] skipping 1403
55129 Sep 22 23:25:00.711 DEBG Flush :1403 extent_limit None deps:[JobId(1402), JobId(1401)] res:true f:164 g:1
55130 Sep 22 23:25:00.711 INFO [lossy] sleeping 1 second
55131 Sep 22 23:25:01.088 DEBG [2] Read AckReady 1402, : downstairs
55132 Sep 22 23:25:01.089 ERRO [2] job id 1403 saw error GenericError("test error")
55133 Sep 22 23:25:01.089 DEBG up_ds_listen was notified
55134 Sep 22 23:25:01.089 DEBG up_ds_listen process 1402
55135 Sep 22 23:25:01.089 DEBG [A] ack job 1402:403, : downstairs
55136 Sep 22 23:25:01.142 DEBG up_ds_listen checked 1 jobs, back to waiting
55137 Sep 22 23:25:01.143 DEBG IO Read 1404 has deps [JobId(1403)]
55138 Sep 22 23:25:01.644 DEBG IO Flush 1405 has deps [JobId(1404), JobId(1403)]
55139 Sep 22 23:25:01.713 WARN returning error on read!
55140 Sep 22 23:25:01.713 DEBG Read :1404 deps:[JobId(1403)] res:false
55141 Sep 22 23:25:01.713 INFO [lossy] skipping 1404
55142 Sep 22 23:25:01.713 INFO [lossy] skipping 1404
55143 Sep 22 23:25:01.719 DEBG Read :1404 deps:[JobId(1403)] res:true
55144 Sep 22 23:25:01.740 ERRO [2] job id 1404 saw error GenericError("test error")
55145 Sep 22 23:25:01.742 WARN returning error on flush!
55146 Sep 22 23:25:01.742 DEBG Flush :1405 extent_limit None deps:[JobId(1404), JobId(1403)] res:false f:165 g:1
55147 Sep 22 23:25:01.742 DEBG Flush :1405 extent_limit None deps:[JobId(1404), JobId(1403)] res:true f:165 g:1
55148 Sep 22 23:25:01.742 INFO [lossy] sleeping 1 second
55149 Sep 22 23:25:02.121 DEBG [2] Read AckReady 1404, : downstairs
55150 Sep 22 23:25:02.122 ERRO [2] job id 1405 saw error GenericError("test error")
55151 Sep 22 23:25:02.122 DEBG up_ds_listen was notified
55152 Sep 22 23:25:02.122 DEBG up_ds_listen process 1404
55153 Sep 22 23:25:02.122 DEBG [A] ack job 1404:405, : downstairs
55154 Sep 22 23:25:02.175 DEBG up_ds_listen checked 1 jobs, back to waiting
55155 Sep 22 23:25:02.177 DEBG IO Read 1406 has deps [JobId(1405)]
55156 Sep 22 23:25:02.677 DEBG IO Flush 1407 has deps [JobId(1406), JobId(1405)]
55157 Sep 22 23:25:02.749 DEBG Read :1406 deps:[JobId(1405)] res:true
55158 Sep 22 23:25:02.772 DEBG Flush :1407 extent_limit None deps:[JobId(1406), JobId(1405)] res:true f:166 g:1
55159 Sep 22 23:25:02.772 INFO [lossy] sleeping 1 second
55160 Sep 22 23:25:03.150 DEBG [2] Read AckReady 1406, : downstairs
55161 Sep 22 23:25:03.151 DEBG up_ds_listen was notified
55162 Sep 22 23:25:03.151 DEBG up_ds_listen process 1406
55163 Sep 22 23:25:03.151 DEBG [A] ack job 1406:407, : downstairs
55164 Sep 22 23:25:03.203 DEBG up_ds_listen checked 1 jobs, back to waiting
55165 Sep 22 23:25:03.205 DEBG IO Read 1408 has deps [JobId(1407)]
55166 Sep 22 23:25:03.706 DEBG IO Flush 1409 has deps [JobId(1408), JobId(1407)]
55167 Sep 22 23:25:03.779 DEBG Read :1408 deps:[JobId(1407)] res:true
55168 Sep 22 23:25:03.802 DEBG Flush :1409 extent_limit None deps:[JobId(1408), JobId(1407)] res:true f:167 g:1
55169 Sep 22 23:25:03.802 INFO [lossy] sleeping 1 second
55170 Sep 22 23:25:04.181 DEBG [2] Read AckReady 1408, : downstairs
55171 Sep 22 23:25:04.182 DEBG up_ds_listen was notified
55172 Sep 22 23:25:04.182 DEBG up_ds_listen process 1408
55173 Sep 22 23:25:04.182 DEBG [A] ack job 1408:409, : downstairs
55174 Sep 22 23:25:04.235 DEBG up_ds_listen checked 1 jobs, back to waiting
55175 Sep 22 23:25:04.236 DEBG IO Read 1410 has deps [JobId(1409)]
55176 Sep 22 23:25:04.737 DEBG IO Flush 1411 has deps [JobId(1410), JobId(1409)]
55177 Sep 22 23:25:04.811 DEBG Read :1410 deps:[JobId(1409)] res:true
55178 Sep 22 23:25:04.834 INFO [lossy] skipping 1411
55179 Sep 22 23:25:04.834 DEBG Flush :1411 extent_limit None deps:[JobId(1410), JobId(1409)] res:true f:168 g:1
55180 Sep 22 23:25:04.834 INFO [lossy] sleeping 1 second
55181 Sep 22 23:25:05.211 DEBG [2] Read AckReady 1410, : downstairs
55182 Sep 22 23:25:05.212 DEBG up_ds_listen was notified
55183 Sep 22 23:25:05.212 DEBG up_ds_listen process 1410
55184 Sep 22 23:25:05.212 DEBG [A] ack job 1410:411, : downstairs
55185 Sep 22 23:25:05.265 DEBG up_ds_listen checked 1 jobs, back to waiting
55186 Sep 22 23:25:05.266 DEBG IO Read 1412 has deps [JobId(1411)]
55187 Sep 22 23:25:05.767 DEBG IO Flush 1413 has deps [JobId(1412), JobId(1411)]
55188 Sep 22 23:25:05.835 WARN returning error on read!
55189 Sep 22 23:25:05.835 DEBG Read :1412 deps:[JobId(1411)] res:false
55190 Sep 22 23:25:05.841 DEBG Read :1412 deps:[JobId(1411)] res:true
55191 Sep 22 23:25:05.863 ERRO [2] job id 1412 saw error GenericError("test error")
55192 Sep 22 23:25:05.864 DEBG Flush :1413 extent_limit None deps:[JobId(1412), JobId(1411)] res:true f:169 g:1
55193 Sep 22 23:25:05.864 INFO [lossy] sleeping 1 second
55194 Sep 22 23:25:06.241 DEBG [2] Read AckReady 1412, : downstairs
55195 Sep 22 23:25:06.242 DEBG up_ds_listen was notified
55196 Sep 22 23:25:06.242 DEBG up_ds_listen process 1412
55197 Sep 22 23:25:06.242 DEBG [A] ack job 1412:413, : downstairs
55198 Sep 22 23:25:06.295 DEBG up_ds_listen checked 1 jobs, back to waiting
55199 Sep 22 23:25:06.296 DEBG IO Read 1414 has deps [JobId(1413)]
55200 Sep 22 23:25:06.797 DEBG IO Flush 1415 has deps [JobId(1414), JobId(1413)]
55201 Sep 22 23:25:06.872 DEBG Read :1414 deps:[JobId(1413)] res:true
55202 Sep 22 23:25:06.895 DEBG Flush :1415 extent_limit None deps:[JobId(1414), JobId(1413)] res:true f:170 g:1
55203 Sep 22 23:25:06.895 INFO [lossy] sleeping 1 second
55204 Sep 22 23:25:07.271 DEBG [2] Read AckReady 1414, : downstairs
55205 Sep 22 23:25:07.273 DEBG up_ds_listen was notified
55206 Sep 22 23:25:07.273 DEBG up_ds_listen process 1414
55207 Sep 22 23:25:07.273 DEBG [A] ack job 1414:415, : downstairs
55208 Sep 22 23:25:07.325 DEBG up_ds_listen checked 1 jobs, back to waiting
55209 Sep 22 23:25:07.326 DEBG IO Read 1416 has deps [JobId(1415)]
55210 Sep 22 23:25:07.827 DEBG IO Flush 1417 has deps [JobId(1416), JobId(1415)]
55211 Sep 22 23:25:07.903 DEBG Read :1416 deps:[JobId(1415)] res:true
55212 Sep 22 23:25:07.926 INFO [lossy] skipping 1417
55213 Sep 22 23:25:07.926 DEBG Flush :1417 extent_limit None deps:[JobId(1416), JobId(1415)] res:true f:171 g:1
55214 Sep 22 23:25:07.926 INFO [lossy] sleeping 1 second
55215 Sep 22 23:25:08.302 DEBG [2] Read AckReady 1416, : downstairs
55216 Sep 22 23:25:08.303 DEBG up_ds_listen was notified
55217 Sep 22 23:25:08.303 DEBG up_ds_listen process 1416
55218 Sep 22 23:25:08.303 DEBG [A] ack job 1416:417, : downstairs
55219 Sep 22 23:25:08.356 DEBG up_ds_listen checked 1 jobs, back to waiting
55220 Sep 22 23:25:08.357 DEBG IO Read 1418 has deps [JobId(1417)]
55221 Sep 22 23:25:08.858 DEBG IO Flush 1419 has deps [JobId(1418), JobId(1417)]
55222 Sep 22 23:25:08.933 DEBG Read :1418 deps:[JobId(1417)] res:true
55223 Sep 22 23:25:08.956 DEBG Flush :1419 extent_limit None deps:[JobId(1418), JobId(1417)] res:true f:172 g:1
55224 Sep 22 23:25:08.956 INFO [lossy] sleeping 1 second
55225 Sep 22 23:25:09.334 DEBG [2] Read AckReady 1418, : downstairs
55226 Sep 22 23:25:09.335 DEBG up_ds_listen was notified
55227 Sep 22 23:25:09.335 DEBG up_ds_listen process 1418
55228 Sep 22 23:25:09.335 DEBG [A] ack job 1418:419, : downstairs
55229 Sep 22 23:25:09.388 DEBG up_ds_listen checked 1 jobs, back to waiting
55230 Sep 22 23:25:09.389 DEBG IO Read 1420 has deps [JobId(1419)]
55231 Sep 22 23:25:09.890 DEBG IO Flush 1421 has deps [JobId(1420), JobId(1419)]
55232 Sep 22 23:25:09.964 DEBG Read :1420 deps:[JobId(1419)] res:true
55233 Sep 22 23:25:09.987 DEBG Flush :1421 extent_limit None deps:[JobId(1420), JobId(1419)] res:true f:173 g:1
55234 Sep 22 23:25:09.987 INFO [lossy] sleeping 1 second
55235 Sep 22 23:25:10.364 DEBG [2] Read AckReady 1420, : downstairs
55236 Sep 22 23:25:10.365 DEBG up_ds_listen was notified
55237 Sep 22 23:25:10.365 DEBG up_ds_listen process 1420
55238 Sep 22 23:25:10.366 DEBG [A] ack job 1420:421, : downstairs
55239 Sep 22 23:25:10.418 DEBG up_ds_listen checked 1 jobs, back to waiting
55240 Sep 22 23:25:10.420 DEBG IO Read 1422 has deps [JobId(1421)]
55241 Sep 22 23:25:10.921 DEBG IO Flush 1423 has deps [JobId(1422), JobId(1421)]
55242 Sep 22 23:25:10.994 DEBG Read :1422 deps:[JobId(1421)] res:true
55243 Sep 22 23:25:11.017 DEBG Flush :1423 extent_limit None deps:[JobId(1422), JobId(1421)] res:true f:174 g:1
55244 Sep 22 23:25:11.017 INFO [lossy] sleeping 1 second
55245 Sep 22 23:25:11.394 DEBG [2] Read AckReady 1422, : downstairs
55246 Sep 22 23:25:11.395 DEBG up_ds_listen was notified
55247 Sep 22 23:25:11.395 DEBG up_ds_listen process 1422
55248 Sep 22 23:25:11.395 DEBG [A] ack job 1422:423, : downstairs
55249 Sep 22 23:25:11.447 DEBG up_ds_listen checked 1 jobs, back to waiting
55250 Sep 22 23:25:11.449 DEBG IO Read 1424 has deps [JobId(1423)]
55251 Sep 22 23:25:11.950 DEBG IO Flush 1425 has deps [JobId(1424), JobId(1423)]
55252 Sep 22 23:25:12.019 INFO [lossy] skipping 1424
55253 Sep 22 23:25:12.026 DEBG Read :1424 deps:[JobId(1423)] res:true
55254 Sep 22 23:25:12.049 DEBG Flush :1425 extent_limit None deps:[JobId(1424), JobId(1423)] res:true f:175 g:1
55255 Sep 22 23:25:12.049 INFO [lossy] sleeping 1 second
55256 Sep 22 23:25:12.425 DEBG [2] Read AckReady 1424, : downstairs
55257 Sep 22 23:25:12.426 DEBG up_ds_listen was notified
55258 Sep 22 23:25:12.427 DEBG up_ds_listen process 1424
55259 Sep 22 23:25:12.427 DEBG [A] ack job 1424:425, : downstairs
55260 Sep 22 23:25:12.479 DEBG up_ds_listen checked 1 jobs, back to waiting
55261 Sep 22 23:25:12.481 DEBG IO Read 1426 has deps [JobId(1425)]
55262 Sep 22 23:25:12.981 DEBG IO Flush 1427 has deps [JobId(1426), JobId(1425)]
55263 Sep 22 23:25:13.050 WARN returning error on read!
55264 Sep 22 23:25:13.050 DEBG Read :1426 deps:[JobId(1425)] res:false
55265 Sep 22 23:25:13.056 DEBG Read :1426 deps:[JobId(1425)] res:true
55266 Sep 22 23:25:13.078 ERRO [2] job id 1426 saw error GenericError("test error")
55267 Sep 22 23:25:13.079 INFO [lossy] skipping 1427
55268 Sep 22 23:25:13.079 DEBG Flush :1427 extent_limit None deps:[JobId(1426), JobId(1425)] res:true f:176 g:1
55269 Sep 22 23:25:13.079 INFO [lossy] sleeping 1 second
55270 Sep 22 23:25:13.456 DEBG [2] Read AckReady 1426, : downstairs
55271 Sep 22 23:25:13.457 DEBG up_ds_listen was notified
55272 Sep 22 23:25:13.457 DEBG up_ds_listen process 1426
55273 Sep 22 23:25:13.457 DEBG [A] ack job 1426:427, : downstairs
55274 Sep 22 23:25:13.509 DEBG up_ds_listen checked 1 jobs, back to waiting
55275 Sep 22 23:25:13.511 DEBG IO Read 1428 has deps [JobId(1427)]
55276 Sep 22 23:25:14.012 DEBG IO Flush 1429 has deps [JobId(1428), JobId(1427)]
55277 Sep 22 23:25:14.086 DEBG Read :1428 deps:[JobId(1427)] res:true
55278 Sep 22 23:25:14.109 DEBG Flush :1429 extent_limit None deps:[JobId(1428), JobId(1427)] res:true f:177 g:1
55279 Sep 22 23:25:14.109 INFO [lossy] sleeping 1 second
55280 Sep 22 23:25:14.485 DEBG [2] Read AckReady 1428, : downstairs
55281 Sep 22 23:25:14.486 DEBG up_ds_listen was notified
55282 Sep 22 23:25:14.486 DEBG up_ds_listen process 1428
55283 Sep 22 23:25:14.486 DEBG [A] ack job 1428:429, : downstairs
55284 Sep 22 23:25:14.538 DEBG up_ds_listen checked 1 jobs, back to waiting
55285 Sep 22 23:25:14.540 DEBG IO Read 1430 has deps [JobId(1429)]
55286 Sep 22 23:25:15.041 DEBG IO Flush 1431 has deps [JobId(1430), JobId(1429)]
55287 Sep 22 23:25:15.116 DEBG Read :1430 deps:[JobId(1429)] res:true
55288 Sep 22 23:25:15.139 DEBG Flush :1431 extent_limit None deps:[JobId(1430), JobId(1429)] res:true f:178 g:1
55289 Sep 22 23:25:15.139 INFO [lossy] sleeping 1 second
55290 Sep 22 23:25:15.515 DEBG [2] Read AckReady 1430, : downstairs
55291 Sep 22 23:25:15.516 DEBG up_ds_listen was notified
55292 Sep 22 23:25:15.516 DEBG up_ds_listen process 1430
55293 Sep 22 23:25:15.516 DEBG [A] ack job 1430:431, : downstairs
55294 Sep 22 23:25:15.569 DEBG up_ds_listen checked 1 jobs, back to waiting
55295 Sep 22 23:25:15.570 DEBG IO Read 1432 has deps [JobId(1431)]
55296 Sep 22 23:25:16.070 DEBG IO Flush 1433 has deps [JobId(1432), JobId(1431)]
55297 Sep 22 23:25:16.140 WARN returning error on read!
55298 Sep 22 23:25:16.140 DEBG Read :1432 deps:[JobId(1431)] res:false
55299 Sep 22 23:25:16.147 DEBG Read :1432 deps:[JobId(1431)] res:true
55300 Sep 22 23:25:16.168 ERRO [2] job id 1432 saw error GenericError("test error")
55301 Sep 22 23:25:16.170 DEBG Flush :1433 extent_limit None deps:[JobId(1432), JobId(1431)] res:true f:179 g:1
55302 Sep 22 23:25:16.170 INFO [lossy] sleeping 1 second
55303 Sep 22 23:25:16.547 DEBG [2] Read AckReady 1432, : downstairs
55304 Sep 22 23:25:16.548 DEBG up_ds_listen was notified
55305 Sep 22 23:25:16.548 DEBG up_ds_listen process 1432
55306 Sep 22 23:25:16.548 DEBG [A] ack job 1432:433, : downstairs
55307 Sep 22 23:25:16.600 DEBG up_ds_listen checked 1 jobs, back to waiting
55308 Sep 22 23:25:16.601 DEBG IO Read 1434 has deps [JobId(1433)]
55309 Sep 22 23:25:17.102 DEBG IO Flush 1435 has deps [JobId(1434), JobId(1433)]
55310 Sep 22 23:25:17.177 DEBG Read :1434 deps:[JobId(1433)] res:true
55311 Sep 22 23:25:17.200 INFO [lossy] skipping 1435
55312 Sep 22 23:25:17.200 WARN returning error on flush!
55313 Sep 22 23:25:17.200 DEBG Flush :1435 extent_limit None deps:[JobId(1434), JobId(1433)] res:false f:180 g:1
55314 Sep 22 23:25:17.200 INFO [lossy] skipping 1435
55315 Sep 22 23:25:17.200 DEBG Flush :1435 extent_limit None deps:[JobId(1434), JobId(1433)] res:true f:180 g:1
55316 Sep 22 23:25:17.200 INFO [lossy] sleeping 1 second
55317 Sep 22 23:25:17.577 DEBG [2] Read AckReady 1434, : downstairs
55318 Sep 22 23:25:17.578 ERRO [2] job id 1435 saw error GenericError("test error")
55319 Sep 22 23:25:17.578 DEBG up_ds_listen was notified
55320 Sep 22 23:25:17.578 DEBG up_ds_listen process 1434
55321 Sep 22 23:25:17.578 DEBG [A] ack job 1434:435, : downstairs
55322 Sep 22 23:25:17.631 DEBG up_ds_listen checked 1 jobs, back to waiting
55323 Sep 22 23:25:17.632 DEBG IO Read 1436 has deps [JobId(1435)]
55324 Sep 22 23:25:18.133 DEBG IO Flush 1437 has deps [JobId(1436), JobId(1435)]
55325 Sep 22 23:25:18.207 DEBG Read :1436 deps:[JobId(1435)] res:true
55326 Sep 22 23:25:18.230 DEBG Flush :1437 extent_limit None deps:[JobId(1436), JobId(1435)] res:true f:181 g:1
55327 Sep 22 23:25:18.230 INFO [lossy] sleeping 1 second
55328 Sep 22 23:25:18.606 DEBG [2] Read AckReady 1436, : downstairs
55329 Sep 22 23:25:18.607 DEBG up_ds_listen was notified
55330 Sep 22 23:25:18.607 DEBG up_ds_listen process 1436
55331 Sep 22 23:25:18.608 DEBG [A] ack job 1436:437, : downstairs
55332 Sep 22 23:25:18.660 DEBG up_ds_listen checked 1 jobs, back to waiting
55333 Sep 22 23:25:18.661 DEBG IO Read 1438 has deps [JobId(1437)]
55334 Sep 22 23:25:19.163 DEBG IO Flush 1439 has deps [JobId(1438), JobId(1437)]
55335 Sep 22 23:25:19.231 WARN returning error on read!
55336 Sep 22 23:25:19.231 DEBG Read :1438 deps:[JobId(1437)] res:false
55337 Sep 22 23:25:19.231 INFO [lossy] skipping 1439
55338 Sep 22 23:25:19.237 DEBG Read :1438 deps:[JobId(1437)] res:true
55339 Sep 22 23:25:19.258 ERRO [2] job id 1438 saw error GenericError("test error")
55340 Sep 22 23:25:19.260 DEBG Flush :1439 extent_limit None deps:[JobId(1438), JobId(1437)] res:true f:182 g:1
55341 Sep 22 23:25:19.260 INFO [lossy] sleeping 1 second
55342 Sep 22 23:25:19.636 DEBG [2] Read AckReady 1438, : downstairs
55343 Sep 22 23:25:19.637 DEBG up_ds_listen was notified
55344 Sep 22 23:25:19.637 DEBG up_ds_listen process 1438
55345 Sep 22 23:25:19.637 DEBG [A] ack job 1438:439, : downstairs
55346 Sep 22 23:25:19.689 DEBG up_ds_listen checked 1 jobs, back to waiting
55347 Sep 22 23:25:19.691 DEBG IO Read 1440 has deps [JobId(1439)]
55348 Sep 22 23:25:20.192 DEBG IO Flush 1441 has deps [JobId(1440), JobId(1439)]
55349 Sep 22 23:25:20.267 DEBG Read :1440 deps:[JobId(1439)] res:true
55350 Sep 22 23:25:20.290 DEBG Flush :1441 extent_limit None deps:[JobId(1440), JobId(1439)] res:true f:183 g:1
55351 Sep 22 23:25:20.666 DEBG [2] Read AckReady 1440, : downstairs
55352 Sep 22 23:25:20.667 DEBG up_ds_listen was notified
55353 Sep 22 23:25:20.667 DEBG up_ds_listen process 1440
55354 Sep 22 23:25:20.667 DEBG [A] ack job 1440:441, : downstairs
55355 Sep 22 23:25:20.720 DEBG up_ds_listen checked 1 jobs, back to waiting
55356 Sep 22 23:25:20.721 DEBG IO Read 1442 has deps [JobId(1441)]
55357 Sep 22 23:25:20.725 INFO [lossy] skipping 1442
55358 Sep 22 23:25:20.725 INFO [lossy] skipping 1442
55359 Sep 22 23:25:20.732 DEBG Read :1442 deps:[JobId(1441)] res:true
55360 Sep 22 23:25:21.132 DEBG [2] Read AckReady 1442, : downstairs
55361 Sep 22 23:25:21.133 DEBG up_ds_listen was notified
55362 Sep 22 23:25:21.133 DEBG up_ds_listen process 1442
55363 Sep 22 23:25:21.134 DEBG [A] ack job 1442:443, : downstairs
55364 Sep 22 23:25:21.186 DEBG up_ds_listen checked 1 jobs, back to waiting
55365 Sep 22 23:25:21.188 DEBG IO Read 1443 has deps [JobId(1441)]
55366 Sep 22 23:25:21.192 WARN returning error on read!
55367 Sep 22 23:25:21.192 DEBG Read :1443 deps:[JobId(1441)] res:false
55368 Sep 22 23:25:21.198 DEBG Read :1443 deps:[JobId(1441)] res:true
55369 Sep 22 23:25:21.219 ERRO [2] job id 1443 saw error GenericError("test error")
55370 Sep 22 23:25:21.598 DEBG [2] Read AckReady 1443, : downstairs
55371 Sep 22 23:25:21.599 DEBG up_ds_listen was notified
55372 Sep 22 23:25:21.599 DEBG up_ds_listen process 1443
55373 Sep 22 23:25:21.599 DEBG [A] ack job 1443:444, : downstairs
55374 Sep 22 23:25:21.652 DEBG up_ds_listen checked 1 jobs, back to waiting
55375 Sep 22 23:25:21.653 DEBG IO Flush 1444 has deps [JobId(1443), JobId(1442), JobId(1441)]
55376 Sep 22 23:25:21.654 DEBG IO Read 1445 has deps [JobId(1444)]
55377 Sep 22 23:25:21.656 DEBG Flush :1444 extent_limit None deps:[JobId(1443), JobId(1442), JobId(1441)] res:true f:184 g:1
55378 Sep 22 23:25:21.658 INFO [lossy] skipping 1445
55379 Sep 22 23:25:21.664 DEBG Read :1445 deps:[JobId(1444)] res:true
55380 Sep 22 23:25:22.064 DEBG [2] Read AckReady 1445, : downstairs
55381 Sep 22 23:25:22.065 DEBG up_ds_listen was notified
55382 Sep 22 23:25:22.065 DEBG up_ds_listen process 1445
55383 Sep 22 23:25:22.065 DEBG [A] ack job 1445:446, : downstairs
55384 Sep 22 23:25:22.118 DEBG up_ds_listen checked 1 jobs, back to waiting
55385 Sep 22 23:25:22.119 DEBG IO Read 1446 has deps [JobId(1444)]
55386 Sep 22 23:25:22.130 DEBG Read :1446 deps:[JobId(1444)] res:true
55387 Sep 22 23:25:22.530 DEBG [2] Read AckReady 1446, : downstairs
55388 Sep 22 23:25:22.531 DEBG up_ds_listen was notified
55389 Sep 22 23:25:22.531 DEBG up_ds_listen process 1446
55390 Sep 22 23:25:22.531 DEBG [A] ack job 1446:447, : downstairs
55391 Sep 22 23:25:22.583 DEBG up_ds_listen checked 1 jobs, back to waiting
55392 Sep 22 23:25:22.584 DEBG IO Flush 1447 has deps [JobId(1446), JobId(1445), JobId(1444)]
55393 Sep 22 23:25:22.585 DEBG IO Read 1448 has deps [JobId(1447)]
55394 Sep 22 23:25:22.587 DEBG Flush :1447 extent_limit None deps:[JobId(1446), JobId(1445), JobId(1444)] res:true f:185 g:1
55395 Sep 22 23:25:22.596 DEBG Read :1448 deps:[JobId(1447)] res:true
55396 Sep 22 23:25:22.996 DEBG [2] Read AckReady 1448, : downstairs
55397 Sep 22 23:25:22.997 DEBG up_ds_listen was notified
55398 Sep 22 23:25:22.997 DEBG up_ds_listen process 1448
55399 Sep 22 23:25:22.997 DEBG [A] ack job 1448:449, : downstairs
55400 Sep 22 23:25:23.050 DEBG up_ds_listen checked 1 jobs, back to waiting
55401 Sep 22 23:25:23.051 INFO [lossy] sleeping 1 second
55402 Sep 22 23:25:23.051 DEBG IO Read 1449 has deps [JobId(1447)]
55403 Sep 22 23:25:23.085 DEBG IO Flush 1450 has deps [JobId(1449), JobId(1448), JobId(1447)]
55404 Sep 22 23:25:24.058 DEBG Read :1449 deps:[JobId(1447)] res:true
55405 Sep 22 23:25:24.080 DEBG Flush :1450 extent_limit None deps:[JobId(1449), JobId(1448), JobId(1447)] res:true f:186 g:1
55406 Sep 22 23:25:24.458 DEBG [2] Read AckReady 1449, : downstairs
55407 Sep 22 23:25:24.460 DEBG up_ds_listen was notified
55408 Sep 22 23:25:24.460 DEBG up_ds_listen process 1449
55409 Sep 22 23:25:24.460 DEBG [A] ack job 1449:450, : downstairs
55410 Sep 22 23:25:24.512 DEBG up_ds_listen checked 1 jobs, back to waiting
55411 Sep 22 23:25:24.514 DEBG IO Read 1451 has deps [JobId(1450)]
55412 Sep 22 23:25:24.518 INFO [lossy] skipping 1451
55413 Sep 22 23:25:24.524 DEBG Read :1451 deps:[JobId(1450)] res:true
55414 Sep 22 23:25:24.923 DEBG [2] Read AckReady 1451, : downstairs
55415 Sep 22 23:25:24.924 DEBG up_ds_listen was notified
55416 Sep 22 23:25:24.924 DEBG up_ds_listen process 1451
55417 Sep 22 23:25:24.924 DEBG [A] ack job 1451:452, : downstairs
55418 Sep 22 23:25:24.977 DEBG up_ds_listen checked 1 jobs, back to waiting
55419 Sep 22 23:25:24.978 DEBG IO Read 1452 has deps [JobId(1450)]
55420 Sep 22 23:25:24.988 DEBG Read :1452 deps:[JobId(1450)] res:true
55421 Sep 22 23:25:25.387 DEBG [2] Read AckReady 1452, : downstairs
55422 Sep 22 23:25:25.388 DEBG up_ds_listen was notified
55423 Sep 22 23:25:25.388 DEBG up_ds_listen process 1452
55424 Sep 22 23:25:25.388 DEBG [A] ack job 1452:453, : downstairs
55425 Sep 22 23:25:25.440 DEBG up_ds_listen checked 1 jobs, back to waiting
55426 Sep 22 23:25:25.441 DEBG IO Flush 1453 has deps [JobId(1452), JobId(1451), JobId(1450)]
55427 Sep 22 23:25:25.442 DEBG IO Read 1454 has deps [JobId(1453)]
55428 Sep 22 23:25:25.444 INFO [lossy] sleeping 1 second
55429 Sep 22 23:25:25.943 DEBG IO Flush 1455 has deps [JobId(1454), JobId(1453)]
55430 Sep 22 23:25:26.444 INFO [lossy] skipping 1453
55431 Sep 22 23:25:26.444 INFO [lossy] skipping 1455
55432 Sep 22 23:25:26.444 INFO [lossy] skipping 1453
55433 Sep 22 23:25:26.444 DEBG Flush :1453 extent_limit None deps:[JobId(1452), JobId(1451), JobId(1450)] res:true f:187 g:1
55434 Sep 22 23:25:26.445 INFO [lossy] sleeping 1 second
55435 Sep 22 23:25:27.446 WARN returning error on read!
55436 Sep 22 23:25:27.446 DEBG Read :1454 deps:[JobId(1453)] res:false
55437 Sep 22 23:25:27.446 WARN 1455 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55438 Sep 22 23:25:27.446 WARN returning error on read!
55439 Sep 22 23:25:27.446 DEBG Read :1454 deps:[JobId(1453)] res:false
55440 Sep 22 23:25:27.446 INFO [lossy] skipping 1454
55441 Sep 22 23:25:27.453 DEBG Read :1454 deps:[JobId(1453)] res:true
55442 Sep 22 23:25:27.474 ERRO [2] job id 1454 saw error GenericError("test error")
55443 Sep 22 23:25:27.474 ERRO [2] job id 1454 saw error GenericError("test error")
55444 Sep 22 23:25:27.476 DEBG Flush :1455 extent_limit None deps:[JobId(1454), JobId(1453)] res:true f:188 g:1
55445 Sep 22 23:25:27.852 DEBG [2] Read AckReady 1454, : downstairs
55446 Sep 22 23:25:27.853 DEBG up_ds_listen was notified
55447 Sep 22 23:25:27.853 DEBG up_ds_listen process 1454
55448 Sep 22 23:25:27.853 DEBG [A] ack job 1454:455, : downstairs
55449 Sep 22 23:25:27.906 DEBG up_ds_listen checked 1 jobs, back to waiting
55450 Sep 22 23:25:27.907 DEBG IO Read 1456 has deps [JobId(1455)]
55451 Sep 22 23:25:27.911 INFO [lossy] skipping 1456
55452 Sep 22 23:25:27.912 WARN returning error on read!
55453 Sep 22 23:25:27.912 DEBG Read :1456 deps:[JobId(1455)] res:false
55454 Sep 22 23:25:27.917 DEBG Read :1456 deps:[JobId(1455)] res:true
55455 Sep 22 23:25:27.939 ERRO [2] job id 1456 saw error GenericError("test error")
55456 Sep 22 23:25:28.317 DEBG [2] Read AckReady 1456, : downstairs
55457 Sep 22 23:25:28.318 DEBG up_ds_listen was notified
55458 Sep 22 23:25:28.318 DEBG up_ds_listen process 1456
55459 Sep 22 23:25:28.318 DEBG [A] ack job 1456:457, : downstairs
55460 Sep 22 23:25:28.371 DEBG up_ds_listen checked 1 jobs, back to waiting
55461 Sep 22 23:25:28.372 DEBG IO Flush 1457 has deps [JobId(1456), JobId(1455)]
55462 Sep 22 23:25:28.372 DEBG IO Read 1458 has deps [JobId(1457)]
55463 Sep 22 23:25:28.374 DEBG Flush :1457 extent_limit None deps:[JobId(1456), JobId(1455)] res:true f:189 g:1
55464 Sep 22 23:25:28.382 DEBG Read :1458 deps:[JobId(1457)] res:true
55465 Sep 22 23:25:28.780 DEBG [2] Read AckReady 1458, : downstairs
55466 Sep 22 23:25:28.781 DEBG up_ds_listen was notified
55467 Sep 22 23:25:28.781 DEBG up_ds_listen process 1458
55468 Sep 22 23:25:28.781 DEBG [A] ack job 1458:459, : downstairs
55469 Sep 22 23:25:28.833 DEBG up_ds_listen checked 1 jobs, back to waiting
55470 Sep 22 23:25:28.835 DEBG IO Read 1459 has deps [JobId(1457)]
55471 Sep 22 23:25:28.839 INFO [lossy] skipping 1459
55472 Sep 22 23:25:28.845 DEBG Read :1459 deps:[JobId(1457)] res:true
55473 Sep 22 23:25:29.244 DEBG [2] Read AckReady 1459, : downstairs
55474 Sep 22 23:25:29.245 DEBG up_ds_listen was notified
55475 Sep 22 23:25:29.245 DEBG up_ds_listen process 1459
55476 Sep 22 23:25:29.245 DEBG [A] ack job 1459:460, : downstairs
55477 Sep 22 23:25:29.298 DEBG up_ds_listen checked 1 jobs, back to waiting
55478 Sep 22 23:25:29.299 DEBG IO Flush 1460 has deps [JobId(1459), JobId(1458), JobId(1457)]
55479 Sep 22 23:25:29.300 DEBG IO Read 1461 has deps [JobId(1460)]
55480 Sep 22 23:25:29.302 INFO [lossy] skipping 1460
55481 Sep 22 23:25:29.302 DEBG Flush :1460 extent_limit None deps:[JobId(1459), JobId(1458), JobId(1457)] res:true f:190 g:1
55482 Sep 22 23:25:29.304 INFO [lossy] sleeping 1 second
55483 Sep 22 23:25:29.800 DEBG IO Flush 1462 has deps [JobId(1461), JobId(1460)]
55484 Sep 22 23:25:30.312 DEBG Read :1461 deps:[JobId(1460)] res:true
55485 Sep 22 23:25:30.335 DEBG Flush :1462 extent_limit None deps:[JobId(1461), JobId(1460)] res:true f:191 g:1
55486 Sep 22 23:25:30.335 INFO [lossy] sleeping 1 second
55487 Sep 22 23:25:30.711 DEBG [2] Read AckReady 1461, : downstairs
55488 Sep 22 23:25:30.712 DEBG up_ds_listen was notified
55489 Sep 22 23:25:30.712 DEBG up_ds_listen process 1461
55490 Sep 22 23:25:30.712 DEBG [A] ack job 1461:462, : downstairs
55491 Sep 22 23:25:30.765 DEBG up_ds_listen checked 1 jobs, back to waiting
55492 Sep 22 23:25:30.766 DEBG IO Read 1463 has deps [JobId(1462)]
55493 Sep 22 23:25:30.803 DEBG IO Flush 1464 has deps [JobId(1463), JobId(1462)]
55494 Sep 22 23:25:31.342 DEBG Read :1463 deps:[JobId(1462)] res:true
55495 Sep 22 23:25:31.365 INFO [lossy] skipping 1464
55496 Sep 22 23:25:31.365 DEBG Flush :1464 extent_limit None deps:[JobId(1463), JobId(1462)] res:true f:192 g:1
55497 Sep 22 23:25:31.741 DEBG [2] Read AckReady 1463, : downstairs
55498 Sep 22 23:25:31.742 DEBG up_ds_listen was notified
55499 Sep 22 23:25:31.742 DEBG up_ds_listen process 1463
55500 Sep 22 23:25:31.743 DEBG [A] ack job 1463:464, : downstairs
55501 Sep 22 23:25:31.795 DEBG up_ds_listen checked 1 jobs, back to waiting
55502 Sep 22 23:25:31.797 DEBG IO Read 1465 has deps [JobId(1464)]
55503 Sep 22 23:25:31.801 INFO [lossy] skipping 1465
55504 Sep 22 23:25:31.801 WARN returning error on read!
55505 Sep 22 23:25:31.801 DEBG Read :1465 deps:[JobId(1464)] res:false
55506 Sep 22 23:25:31.807 DEBG Read :1465 deps:[JobId(1464)] res:true
55507 Sep 22 23:25:31.828 DEBG IO Flush 1466 has deps [JobId(1465), JobId(1464)]
55508 Sep 22 23:25:31.828 ERRO [2] job id 1465 saw error GenericError("test error")
55509 Sep 22 23:25:31.830 INFO [lossy] sleeping 1 second
55510 Sep 22 23:25:32.206 DEBG [2] Read AckReady 1465, : downstairs
55511 Sep 22 23:25:32.207 DEBG up_ds_listen was notified
55512 Sep 22 23:25:32.207 DEBG up_ds_listen process 1465
55513 Sep 22 23:25:32.207 DEBG [A] ack job 1465:466, : downstairs
55514 Sep 22 23:25:32.260 DEBG up_ds_listen checked 1 jobs, back to waiting
55515 Sep 22 23:25:32.261 DEBG IO Read 1467 has deps [JobId(1466)]
55516 Sep 22 23:25:32.330 DEBG IO Flush 1468 has deps [JobId(1467), JobId(1466)]
55517 Sep 22 23:25:32.831 WARN returning error on flush!
55518 Sep 22 23:25:32.831 DEBG Flush :1466 extent_limit None deps:[JobId(1465), JobId(1464)] res:false f:193 g:1
55519 Sep 22 23:25:32.831 DEBG Flush :1466 extent_limit None deps:[JobId(1465), JobId(1464)] res:true f:193 g:1
55520 Sep 22 23:25:32.831 INFO [lossy] sleeping 1 second
55521 Sep 22 23:25:32.831 ERRO [2] job id 1466 saw error GenericError("test error")
55522 Sep 22 23:25:33.832 INFO [lossy] skipping 1467
55523 Sep 22 23:25:33.832 WARN 1468 job Flush for connection UpstairsConnection { upstairs_id: b937f86b-985c-4e08-8b98-07f7aae5f860, session_id: 27d81b06-bab5-4e4f-b4d3-4909cf4b2397, gen: 1 } waiting on 1 deps, role: work
55524 Sep 22 23:25:33.839 DEBG Read :1467 deps:[JobId(1466)] res:true
55525 Sep 22 23:25:33.862 WARN returning error on flush!
55526 Sep 22 23:25:33.862 DEBG Flush :1468 extent_limit None deps:[JobId(1467), JobId(1466)] res:false f:194 g:1
55527 Sep 22 23:25:33.862 INFO [lossy] skipping 1468
55528 Sep 22 23:25:33.862 INFO [lossy] skipping 1468
55529 Sep 22 23:25:33.862 WARN returning error on flush!
55530 Sep 22 23:25:33.862 DEBG Flush :1468 extent_limit None deps:[JobId(1467), JobId(1466)] res:false f:194 g:1
55531 Sep 22 23:25:33.862 DEBG Flush :1468 extent_limit None deps:[JobId(1467), JobId(1466)] res:true f:194 g:1
55532 Sep 22 23:25:34.238 DEBG [2] Read AckReady 1467, : downstairs
55533 Sep 22 23:25:34.239 ERRO [2] job id 1468 saw error GenericError("test error")
55534 Sep 22 23:25:34.239 ERRO [2] job id 1468 saw error GenericError("test error")
55535 Sep 22 23:25:34.239 DEBG up_ds_listen was notified
55536 Sep 22 23:25:34.239 DEBG up_ds_listen process 1467
55537 Sep 22 23:25:34.240 DEBG [A] ack job 1467:468, : downstairs
55538 Sep 22 23:25:34.292 DEBG up_ds_listen checked 1 jobs, back to waiting
55539 Sep 22 23:25:34.293 DEBG IO Read 1469 has deps [JobId(1468)]
55540 Sep 22 23:25:34.297 INFO [lossy] skipping 1469
55541 Sep 22 23:25:34.304 DEBG Read :1469 deps:[JobId(1468)] res:true
55542 Sep 22 23:25:34.704 DEBG [2] Read AckReady 1469, : downstairs
55543 Sep 22 23:25:34.705 DEBG up_ds_listen was notified
55544 Sep 22 23:25:34.705 DEBG up_ds_listen process 1469
55545 Sep 22 23:25:34.705 DEBG [A] ack job 1469:470, : downstairs
55546 Sep 22 23:25:34.758 DEBG up_ds_listen checked 1 jobs, back to waiting
55547 Sep 22 23:25:34.759 DEBG IO Flush 1470 has deps [JobId(1469), JobId(1468)]
55548 Sep 22 23:25:34.760 DEBG IO Read 1471 has deps [JobId(1470)]
55549 Sep 22 23:25:34.762 DEBG Flush :1470 extent_limit None deps:[JobId(1469), JobId(1468)] res:true f:195 g:1
55550 Sep 22 23:25:34.770 DEBG Read :1471 deps:[JobId(1470)] res:true
55551 Sep 22 23:25:35.171 DEBG [2] Read AckReady 1471, : downstairs
55552 Sep 22 23:25:35.172 DEBG up_ds_listen was notified
55553 Sep 22 23:25:35.172 DEBG up_ds_listen process 1471
55554 Sep 22 23:25:35.172 DEBG [A] ack job 1471:472, : downstairs
55555 Sep 22 23:25:35.225 DEBG up_ds_listen checked 1 jobs, back to waiting
55556 Sep 22 23:25:35.226 DEBG IO Read 1472 has deps [JobId(1470)]
55557 Sep 22 23:25:35.231 WARN returning error on read!
55558 Sep 22 23:25:35.231 DEBG Read :1472 deps:[JobId(1470)] res:false
55559 Sep 22 23:25:35.231 INFO [lossy] skipping 1472
55560 Sep 22 23:25:35.236 DEBG Read :1472 deps:[JobId(1470)] res:true
55561 Sep 22 23:25:35.258 ERRO [2] job id 1472 saw error GenericError("test error")
55562 Sep 22 23:25:35.636 DEBG [2] Read AckReady 1472, : downstairs
55563 Sep 22 23:25:35.637 DEBG up_ds_listen was notified
55564 Sep 22 23:25:35.637 DEBG up_ds_listen process 1472
55565 Sep 22 23:25:35.637 DEBG [A] ack job 1472:473, : downstairs
55566 Sep 22 23:25:35.690 DEBG up_ds_listen checked 1 jobs, back to waiting
55567 Sep 22 23:25:35.691 DEBG IO Flush 1473 has deps [JobId(1472), JobId(1471), JobId(1470)]
55568 Sep 22 23:25:35.692 DEBG IO Read 1474 has deps [JobId(1473)]
55569 Sep 22 23:25:35.694 DEBG Flush :1473 extent_limit None deps:[JobId(1472), JobId(1471), JobId(1470)] res:true f:196 g:1
55570 Sep 22 23:25:35.696 INFO [lossy] sleeping 1 second
55571 Sep 22 23:25:36.192 DEBG IO Flush 1475 has deps [JobId(1474), JobId(1473)]
55572 Sep 22 23:25:36.703 DEBG Read :1474 deps:[JobId(1473)] res:true
55573 Sep 22 23:25:36.726 DEBG Flush :1475 extent_limit None deps:[JobId(1474), JobId(1473)] res:true f:197 g:1
55574 Sep 22 23:25:36.726 INFO [lossy] sleeping 1 second
55575 Sep 22 23:25:37.103 DEBG [2] Read AckReady 1474, : downstairs
55576 Sep 22 23:25:37.104 DEBG up_ds_listen was notified
55577 Sep 22 23:25:37.104 DEBG up_ds_listen process 1474
55578 Sep 22 23:25:37.104 DEBG [A] ack job 1474:475, : downstairs
55579 Sep 22 23:25:37.157 DEBG up_ds_listen checked 1 jobs, back to waiting
55580 Sep 22 23:25:37.158 DEBG IO Read 1476 has deps [JobId(1475)]
55581 Sep 22 23:25:37.195 DEBG IO Flush 1477 has deps [JobId(1476), JobId(1475)]
55582 Sep 22 23:25:37.728 INFO [lossy] skipping 1476
55583 Sep 22 23:25:37.728 WARN returning error on read!
55584 Sep 22 23:25:37.728 DEBG Read :1476 deps:[JobId(1475)] res:false
55585 Sep 22 23:25:37.735 DEBG Read :1476 deps:[JobId(1475)] res:true
55586 Sep 22 23:25:37.756 ERRO [2] job id 1476 saw error GenericError("test error")
55587 Sep 22 23:25:37.757 INFO [lossy] skipping 1477
55588 Sep 22 23:25:37.758 DEBG Flush :1477 extent_limit None deps:[JobId(1476), JobId(1475)] res:true f:198 g:1
55589 Sep 22 23:25:37.758 INFO [lossy] sleeping 1 second
55590 Sep 22 23:25:38.134 DEBG [2] Read AckReady 1476, : downstairs
55591 Sep 22 23:25:38.135 DEBG up_ds_listen was notified
55592 Sep 22 23:25:38.135 DEBG up_ds_listen process 1476
55593 Sep 22 23:25:38.136 DEBG [A] ack job 1476:477, : downstairs
55594 Sep 22 23:25:38.188 DEBG up_ds_listen checked 1 jobs, back to waiting
55595 Sep 22 23:25:38.190 DEBG IO Read 1478 has deps [JobId(1477)]
55596 Sep 22 23:25:38.197 DEBG IO Flush 1479 has deps [JobId(1478), JobId(1477)]
55597 Sep 22 23:25:38.758 INFO [lossy] skipping 1478
55598 Sep 22 23:25:38.764 DEBG Read :1478 deps:[JobId(1477)] res:true
55599 Sep 22 23:25:38.787 INFO [lossy] skipping 1479
55600 Sep 22 23:25:38.787 INFO [lossy] skipping 1479
55601 Sep 22 23:25:38.787 DEBG Flush :1479 extent_limit None deps:[JobId(1478), JobId(1477)] res:true f:199 g:1
55602 Sep 22 23:25:38.787 INFO [lossy] sleeping 1 second
55603 Sep 22 23:25:39.164 DEBG [2] Read AckReady 1478, : downstairs
55604 Sep 22 23:25:39.165 DEBG up_ds_listen was notified
55605 Sep 22 23:25:39.165 DEBG up_ds_listen process 1478
55606 Sep 22 23:25:39.165 DEBG [A] ack job 1478:479, : downstairs
55607 Sep 22 23:25:39.218 DEBG up_ds_listen checked 1 jobs, back to waiting
55608 Sep 22 23:25:39.219 DEBG IO Read 1480 has deps [JobId(1479)]
55609 Sep 22 23:25:39.720 DEBG IO Flush 1481 has deps [JobId(1480), JobId(1479)]
55610 Sep 22 23:25:39.795 DEBG Read :1480 deps:[JobId(1479)] res:true
55611 Sep 22 23:25:39.818 DEBG Flush :1481 extent_limit None deps:[JobId(1480), JobId(1479)] res:true f:200 g:1
55612 Sep 22 23:25:40.195 DEBG [2] Read AckReady 1480, : downstairs
55613 Sep 22 23:25:40.197 DEBG up_ds_listen was notified
55614 Sep 22 23:25:40.197 DEBG up_ds_listen process 1480
55615 Sep 22 23:25:40.197 DEBG [A] ack job 1480:481, : downstairs
55616 Sep 22 23:25:40.249 DEBG up_ds_listen checked 1 jobs, back to waiting
55617 test test::integration_test_problematic_downstairs ... ok
55618 
55619 test result: ok. 57 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 242.88s
55620 
55621 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_nbd_server-8c1612631a1669fd --nocapture`
55622 
55623 running 0 tests
55624 
55625 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
55626 
55627 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_package-e8ff0170d25e0da5 --nocapture`
55628 
55629 running 0 tests
55630 
55631 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
55632 
55633 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_pantry-8e1bf492bfe90e8c --nocapture`
55634 
55635 running 0 tests
55636 
55637 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
55638 
55639 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_pantry-b51bc30f7a0cbfa5 --nocapture`
55640 
55641 running 1 test
55642 test tests::test_crucible_pantry_openapi ... ok
55643 
55644 test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
55645 
55646 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_pantry_client-8a27f01eb086219e --nocapture`
55647 
55648 running 0 tests
55649 
55650 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
55651 
55652 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_protocol-c776b78ce4b42bf6 --nocapture`
55653 
55654 running 9 tests
55655 test tests::latest_message_version ... ok
55656 test tests::correctly_detect_truncated_message ... ok
55657 test tests::rt_evp ... ok
55658 test tests::rt_ev_0 ... ok
55659 test tests::rt_ev_7 ... ok
55660 test tests::rt_imok ... ok
55661 test tests::rt_here_i_am ... ok
55662 test tests::rt_ruok ... ok
55663 test tests::rt_yes_its_me ... ok
55664 
55665 test result: ok. 9 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
55666 
55667 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_protocol-d81e6562be2ffe77 --nocapture`
55668 
55669 running 0 tests
55670 
55671 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
55672 
55673 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_smf-6114df38a9482a0c --nocapture`
55674 
55675 running 10 tests
55676 SCF_LIMIT_MAX_NAME_LENGTH = 119
55677 SCF_LIMIT_MAX_VALUE_LENGTH = 4095
55678 SCF_LIMIT_MAX_PG_TYPE_LENGTH = 119
55679 SCF_LIMIT_MAX_FMRI_LENGTH = 628
55680 test scf_sys::tests::limits ... ok
55681 test scf_sys::tests::handle ... ok
55682 name = Ok("localhost")
55683 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55684 test tests::scope_not_set ... ok
55685 test tests::iter ... ok
55686 test tests::basic ... ok
55687 milestone/multi-user
55688 test tests::scope_local ... ok
55689 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55690 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55691 test tests::scope_iter ... ok
55692 milestone/multi-user
55693 milestone/multi-user
55694 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55695 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55696 default
55697 milestone/name-services
55698 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55699 default
55700 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55701 milestone/single-user
55702 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55703 milestone/name-services
55704 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55705 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55706 initial
55707 network/datalink-management
55708 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55709 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55710 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55711 default
55712 system/install-discovery
55713 last-import
55714 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55715 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55716 previous
55717 system/device/local
55718 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55719 milestone/single-user
55720 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55721 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55722 running
55723 network/physical
55724 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55725 default
55726 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55727 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55728 start
55729 network/initial
55730 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55731 network/ip-interface-management
55732 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55733 network/datalink-management
55734 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55735 network/loopback
55736 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55737 default
55738 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55739 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55740 network/iptun
55741 milestone/name-services
55742 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55743 network/netcfg
55744 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55745 system/install-discovery
55746 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55747 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55748 default
55749 network/rpc/bind
55750 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55751 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55752 system/boot-archive
55753 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55754 initial
55755 system/device/local
55756 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55757 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55758 milestone/devices
55759 last-import
55760 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55761 default
55762 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55763 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55764 system/filesystem/local
55765 previous
55766 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55767 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55768 system/filesystem/minimal
55769 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55770 running
55771 network/physical
55772 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55773 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55774 system/filesystem/root
55775 start
55776 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55777 default
55778 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55779 system/filesystem/usr
55780 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55781 nwam
55782 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55783 system/identity
55784 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55785 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55786 system/manifest-import
55787 network/initial
55788 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55789 milestone/single-user
55790 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55791 system/svc/global
55792 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55793 default
55794 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55795 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55796 system/svc/restarter
55797 default
55798 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55799 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55800 milestone/multi-user-server
55801 network/ip-interface-management
55802 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55803 initial
55804 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55805 network/inetd-upgrade
55806 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55807 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55808 default
55809 last-import
55810 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55811 system/console-login
55812 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55813 previous
55814 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55815 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55816 network/loopback
55817 system/utmp
55818 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55819 running
55820 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55821 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55822 application/management/net-snmp
55823 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55824 default
55825 start
55826 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55827 smf/manifest
55828 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55829 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55830 network/iptun
55831 application/pkg/repositories-setup
55832 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55833 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55834 application/pkg/dynamic-mirror
55835 default
55836 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55837 network/datalink-management
55838 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55839 application/pkg/mirror
55840 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55841 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55842 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55843 network/netcfg
55844 default
55845 application/pkg/server
55846 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55847 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55848 application/security/tcsd
55849 default
55850 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55851 initial
55852 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55853 milestone/sysconfig
55854 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55855 last-import
55856 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55857 network/rpc/bind
55858 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55859 milestone/network
55860 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55861 previous
55862 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55863 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55864 default
55865 network/ntp
55866 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55867 running
55868 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55869 network/ipmp
55870 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55871 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55872 start
55873 system/boot-archive
55874 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55875 network/routing/rdisc
55876 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55877 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55878 default
55879 network/routing/route
55880 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55881 network/routing/ndp
55882 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55883 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55884 milestone/devices
55885 system/install-discovery
55886 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55887 network/routing/legacy-routing
55888 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55889 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55890 default
55891 network/routing/ripng
55892 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55893 network/inetd
55894 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55895 system/device/local
55896 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55897 system/filesystem/local
55898 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55899 network/bridge
55900 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55901 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55902 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55903 default
55904 default
55905 network/ipv4-forwarding
55906 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55907 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55908 network/ipv6-forwarding
55909 initial
55910 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55911 system/filesystem/minimal
55912 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55913 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55914 network/ipqos
55915 last-import
55916 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55917 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55918 default
55919 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55920 network/ipsec/ipsecalgs
55921 previous
55922 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55923 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55924 network/ipsec/policy
55925 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55926 running
55927 system/filesystem/root
55928 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55929 network/ipsec/manual-key
55930 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55931 start
55932 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55933 default
55934 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55935 network/ipsec/ike
55936 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55937 network/install
55938 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55939 system/filesystem/usr
55940 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55941 network/shares/group
55942 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
55943 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55944 network/physical
55945 default
55946 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55947 system/filesystem/reparse
55948 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55949 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55950 network/rpc/smserver
55951 default
55952 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55953 system/identity
55954 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55955 network/rpc/keyserv
55956 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55957 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55958 initial
55959 domain
55960 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55961 network/rpc/gss
55962 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55963 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55964 node
55965 last-import
55966 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55967 network/ipfilter
55968 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55969 previous
55970 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55971 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55972 network/ldap/client
55973 system/manifest-import
55974 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55975 running
55976 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55977 network/smb/client
55978 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55979 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55980 default
55981 start
55982 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55983 network/smb/server
55984 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55985 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
55986 network/routing-setup
55987 system/svc/global
55988 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
55989 nwam
55990 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55991 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
55992 network/npiv_config
55993 default
55994 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55995 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
55996 system/device/fc-fabric
55997 initial
55998 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
55999 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56000 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56001 system/svc/restarter
56002 network/ssh
56003 last-import
56004 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56005 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56006 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56007 network/varpd
56008 previous
56009 default
56010 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56011 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56012 network/security/kadmin
56013 running
56014 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56015 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56016 network/security/krb5kdc
56017 milestone/multi-user-server
56018 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56019 network/security/ktkt_warn
56020 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56021 default
56022 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56023 network/device-discovery/printers
56024 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56025 network/initial
56026 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56027 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56028 network/service
56029 network/inetd-upgrade
56030 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56031 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56032 default
56033 network/nis/client
56034 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56035 default
56036 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56037 network/location
56038 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56039 initial
56040 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56041 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56042 network/dns/client
56043 system/console-login
56044 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56045 last-import
56046 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56047 system/name-service-cache
56048 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56049 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56050 default
56051 previous
56052 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56053 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56054 network/nfs/mapid
56055 vt2
56056 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56057 running
56058 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56059 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56060 vt3
56061 network/chrony
56062 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56063 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56064 start
56065 vt4
56066 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56067 network/ibd-post-upgrade
56068 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56069 vt5
56070 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56071 network/tftp/udp6
56072 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56073 vt6
56074 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56075 network/netmask
56076 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56077 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56078 network/ip-interface-management
56079 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56080 network/dns/multicast
56081 system/utmp
56082 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56083 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56084 network/dns/install
56085 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56086 default
56087 default
56088 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56089 network/nfs/log
56090 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56091 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56092 initial
56093 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56094 network/nfs/rquota
56095 application/management/net-snmp
56096 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56097 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56098 last-import
56099 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56100 network/nfs/client
56101 default
56102 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56103 previous
56104 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56105 network/nfs/server
56106 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56107 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56108 running
56109 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56110 network/nfs/cbd
56111 smf/manifest
56112 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56113 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56114 start
56115 network/nfs/status
56116 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56117 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56118 network/nfs/nlockmgr
56119 application/pkg/repositories-setup
56120 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56121 platform/i86pc/acpihpd
56122 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56123 default
56124 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56125 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56126 system/sac
56127 network/loopback
56128 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56129 system/fcoe_initiator
56130 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56131 application/pkg/dynamic-mirror
56132 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56133 default
56134 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56135 system/fmd
56136 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56137 default
56138 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56139 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56140 system/sysevent
56141 initial
56142 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56143 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56144 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56145 system/boot-config
56146 last-import
56147 application/pkg/mirror
56148 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56149 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56150 system/picl
56151 previous
56152 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56153 default
56154 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56155 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56156 system/coreadm
56157 running
56158 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56159 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56160 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56161 system/hal
56162 start
56163 application/pkg/server
56164 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56165 system/resource-mgmt
56166 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56167 default
56168 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56169 system/rcap
56170 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56171 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56172 system/system-log
56173 application/security/tcsd
56174 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56175 network/iptun
56176 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56177 system/dumpadm
56178 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56179 default
56180 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56181 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56182 system/dbus
56183 default
56184 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56185 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56186 milestone/sysconfig
56187 system/pools
56188 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56189 initial
56190 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56191 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56192 system/power
56193 default
56194 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56195 last-import
56196 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56197 system/keymap
56198 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56199 previous
56200 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56201 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56202 milestone/network
56203 system/auditset
56204 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56205 running
56206 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56207 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56208 default
56209 system/stmf
56210 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56211 start
56212 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56213 system/hotplug
56214 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56215 network/ntp
56216 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56217 system/rbac
56218 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56219 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56220 default
56221 system/logadm-upgrade
56222 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56223 network/netcfg
56224 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56225 system/hostid
56226 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56227 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56228 network/ipmp
56229 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56230 default
56231 system/filesystem/autofs
56232 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56233 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56234 default
56235 system/cron
56236 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56237 initial
56238 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56239 system/illumos/userscript
56240 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56241 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56242 last-import
56243 network/routing/rdisc
56244 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56245 system/vtdaemon
56246 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56247 previous
56248 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56249 default
56250 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56251 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56252 system/boot-archive-update
56253 running
56254 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56255 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56256 system/cryptosvc
56257 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56258 start
56259 network/routing/route
56260 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56261 system/intrd
56262 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56263 default
56264 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56265 system/rmtmpfiles
56266 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56267 system/t6init
56268 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56269 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56270 network/rpc/bind
56271 network/routing/ndp
56272 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56273 system/auditd
56274 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56275 default
56276 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56277 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56278 system/idmap
56279 default
56280 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56281 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56282 system/zones-monitoring
56283 network/routing/legacy-routing
56284 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56285 initial
56286 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56287 system/zones
56288 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56289 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56290 last-import
56291 ipv4
56292 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56293 system/ipcc
56294 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56295 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56296 previous
56297 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56298 ipv6
56299 system/update-man-index
56300 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56301 running
56302 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56303 system/process-security
56304 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56305 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56306 start
56307 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56308 network/routing/ripng
56309 system/fm/notify-params
56310 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56311 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56312 system/pkgserv
56313 default
56314 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56315 system/extended-accounting
56316 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56317 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56318 system/boot-archive
56319 system/consadm
56320 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56321 network/inetd
56322 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56323 system/scheduler
56324 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56325 default
56326 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56327 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56328 default
56329 system/pfexec
56330 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56331 initial
56332 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56333 system/illumos/metadata
56334 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56335 last-import
56336 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56337 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56338 system/sar
56339 network/bridge
56340 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56341 previous
56342 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56343 system/early-manifest-import
56344 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56345 running
56346 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56347 system/fcoe_target
56348 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56349 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56350 start
56351 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56352 network/ipv4-forwarding
56353 system/device/mpxio-upgrade
56354 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56355 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56356 system/device/audio
56357 default
56358 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56359 system/device/allocate
56360 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56361 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56362 milestone/devices
56363 smf/legacy_run
56364 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56365 network/ipv6-forwarding
56366 Ok(Service { scf: Scf { handle: 0x6b3050 }, service: 0x6b4810 })
56367 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56368 site/buildomat/agent
56369 default
56370 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56371 default
56372 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56373 initial
56374 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56375 test tests::service_iter ... Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56376 ok
56377 last-import
56378 network/ipqos
56379 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56380 previous
56381 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56382 default
56383 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56384 running
56385 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56386 start
56387 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56388 network/ipsec/ipsecalgs
56389 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56390 default
56391 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56392 system/filesystem/local
56393 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56394 network/ipsec/policy
56395 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56396 default
56397 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56398 default
56399 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56400 initial
56401 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56402 last-import
56403 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56404 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56405 network/ipsec/manual-key
56406 previous
56407 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56408 running
56409 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56410 default
56411 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56412 start
56413 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56414 network/ipsec/ike
56415 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56416 default
56417 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56418 system/filesystem/minimal
56419 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56420 default
56421 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56422 network/install
56423 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56424 initial
56425 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56426 default
56427 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56428 last-import
56429 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56430 previous
56431 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56432 network/shares/group
56433 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56434 running
56435 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56436 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56437 default
56438 start
56439 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56440 zfs
56441 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56442 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56443 system/filesystem/reparse
56444 system/filesystem/root
56445 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56446 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56447 default
56448 default
56449 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56450 initial
56451 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56452 network/rpc/smserver
56453 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56454 last-import
56455 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56456 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56457 default
56458 previous
56459 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56460 running
56461 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56462 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56463 start
56464 network/rpc/keyserv
56465 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56466 default
56467 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56468 system/filesystem/usr
56469 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56470 network/rpc/gss
56471 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56472 default
56473 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56474 default
56475 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56476 initial
56477 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56478 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56479 last-import
56480 network/ipfilter
56481 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56482 previous
56483 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56484 default
56485 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56486 running
56487 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56488 start
56489 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56490 network/ldap/client
56491 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56492 default
56493 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56494 system/identity
56495 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56496 network/smb/client
56497 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56498 domain
56499 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56500 default
56501 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56502 initial
56503 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56504 last-import
56505 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56506 network/smb/server
56507 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56508 previous
56509 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56510 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56511 running
56512 default
56513 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56514 start
56515 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56516 network/routing-setup
56517 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56518 node
56519 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56520 default
56521 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56522 initial
56523 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56524 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56525 last-import
56526 network/npiv_config
56527 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56528 previous
56529 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56530 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56531 default
56532 running
56533 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56534 start
56535 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56536 system/device/fc-fabric
56537 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56538 default
56539 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56540 system/manifest-import
56541 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56542 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56543 network/ssh
56544 default
56545 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56546 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56547 default
56548 initial
56549 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56550 last-import
56551 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56552 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56553 network/varpd
56554 previous
56555 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56556 running
56557 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56558 default
56559 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56560 start
56561 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56562 network/security/kadmin
56563 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56564 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56565 default
56566 system/svc/global
56567 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56568 default
56569 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56570 network/security/krb5kdc
56571 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56572 initial
56573 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56574 default
56575 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56576 last-import
56577 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56578 previous
56579 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56580 network/security/ktkt_warn
56581 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56582 running
56583 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56584 default
56585 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56586 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56587 network/device-discovery/printers
56588 system/svc/restarter
56589 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56590 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56591 snmp
56592 default
56593 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56594 initial
56595 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56596 network/service
56597 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56598 last-import
56599 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56600 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56601 previous
56602 default
56603 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56604 running
56605 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56606 network/nis/client
56607 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56608 default
56609 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56610 milestone/multi-user-server
56611 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56612 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56613 default
56614 network/location
56615 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56616 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56617 initial
56618 default
56619 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56620 last-import
56621 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56622 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56623 previous
56624 network/dns/client
56625 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56626 running
56627 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56628 default
56629 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56630 start
56631 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56632 system/name-service-cache
56633 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56634 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56635 network/inetd-upgrade
56636 default
56637 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56638 default
56639 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56640 network/nfs/mapid
56641 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56642 initial
56643 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56644 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56645 default
56646 last-import
56647 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56648 previous
56649 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56650 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56651 running
56652 network/chrony
56653 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56654 start
56655 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56656 default
56657 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56658 network/ibd-post-upgrade
56659 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56660 system/console-login
56661 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56662 default
56663 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56664 default
56665 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56666 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56667 initial
56668 network/tftp/udp6
56669 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56670 last-import
56671 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56672 default
56673 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56674 previous
56675 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56676 running
56677 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56678 network/netmask
56679 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56680 start
56681 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56682 default
56683 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56684 vt2
56685 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56686 network/dns/multicast
56687 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56688 initial
56689 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56690 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56691 last-import
56692 default
56693 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56694 previous
56695 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56696 running
56697 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56698 network/dns/install
56699 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56700 default
56701 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56702 vt3
56703 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56704 initial
56705 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56706 network/nfs/log
56707 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56708 last-import
56709 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56710 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56711 default
56712 previous
56713 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56714 running
56715 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56716 network/nfs/rquota
56717 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56718 vt4
56719 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56720 default
56721 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56722 initial
56723 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56724 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56725 last-import
56726 network/nfs/client
56727 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56728 previous
56729 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56730 default
56731 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56732 running
56733 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56734 network/nfs/server
56735 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56736 vt5
56737 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56738 default
56739 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56740 initial
56741 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56742 last-import
56743 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56744 network/nfs/cbd
56745 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56746 previous
56747 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56748 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56749 running
56750 default
56751 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56752 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56753 vt6
56754 network/nfs/status
56755 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56756 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56757 initial
56758 default
56759 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56760 last-import
56761 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56762 previous
56763 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56764 network/nfs/nlockmgr
56765 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56766 running
56767 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56768 default
56769 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56770 platform/i86pc/acpihpd
56771 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56772 system/utmp
56773 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56774 default
56775 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56776 default
56777 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56778 initial
56779 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56780 system/sac
56781 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56782 last-import
56783 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56784 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56785 default
56786 previous
56787 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56788 running
56789 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56790 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56791 system/fcoe_initiator
56792 start
56793 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56794 default
56795 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56796 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56797 application/management/net-snmp
56798 system/fmd
56799 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56800 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56801 default
56802 default
56803 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56804 initial
56805 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56806 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56807 system/sysevent
56808 last-import
56809 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56810 running
56811 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56812 default
56813 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56814 system/boot-config
56815 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56816 smf/manifest
56817 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56818 default
56819 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56820 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56821 application/pkg/repositories-setup
56822 system/picl
56823 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56824 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56825 default
56826 default
56827 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56828 initial
56829 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56830 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56831 system/coreadm
56832 last-import
56833 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56834 running
56835 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56836 default
56837 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56838 system/hal
56839 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56840 application/pkg/dynamic-mirror
56841 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56842 default
56843 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56844 default
56845 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56846 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56847 initial
56848 system/resource-mgmt
56849 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56850 last-import
56851 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56852 default
56853 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56854 running
56855 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56856 system/rcap
56857 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56858 application/pkg/mirror
56859 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56860 system/system-log
56861 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56862 default
56863 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56864 default
56865 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56866 initial
56867 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56868 rsyslog
56869 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56870 last-import
56871 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56872 running
56873 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56874 system/dumpadm
56875 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56876 default
56877 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56878 application/pkg/server
56879 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56880 system/dbus
56881 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56882 default
56883 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56884 default
56885 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56886 initial
56887 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56888 last-import
56889 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56890 system/pools
56891 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56892 running
56893 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56894 default
56895 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56896 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56897 application/security/tcsd
56898 system/power
56899 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56900 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56901 default
56902 default
56903 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56904 initial
56905 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56906 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56907 system/keymap
56908 last-import
56909 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56910 running
56911 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56912 default
56913 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56914 system/auditset
56915 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56916 milestone/sysconfig
56917 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56918 default
56919 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56920 default
56921 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56922 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56923 system/stmf
56924 initial
56925 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56926 last-import
56927 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56928 default
56929 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56930 running
56931 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56932 start
56933 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56934 system/hotplug
56935 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56936 default
56937 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56938 milestone/network
56939 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56940 system/rbac
56941 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56942 default
56943 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56944 default
56945 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56946 initial
56947 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56948 last-import
56949 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56950 system/logadm-upgrade
56951 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56952 running
56953 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56954 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56955 default
56956 start
56957 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56958 system/hostid
56959 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56960 network/ntp
56961 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56962 default
56963 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56964 default
56965 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56966 system/filesystem/autofs
56967 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56968 initial
56969 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56970 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56971 last-import
56972 default
56973 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56974 running
56975 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56976 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56977 start
56978 system/cron
56979 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56980 default
56981 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
56982 network/ipmp
56983 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56984 system/illumos/userscript
56985 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
56986 default
56987 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56988 default
56989 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56990 initial
56991 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56992 last-import
56993 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
56994 system/vtdaemon
56995 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56996 running
56997 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
56998 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
56999 default
57000 start
57001 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57002 system/boot-archive-update
57003 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57004 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57005 network/routing/rdisc
57006 default
57007 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57008 default
57009 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57010 system/cryptosvc
57011 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57012 initial
57013 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57014 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57015 default
57016 last-import
57017 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57018 running
57019 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57020 system/intrd
57021 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57022 default
57023 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57024 network/routing/route
57025 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57026 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57027 system/rmtmpfiles
57028 default
57029 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57030 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57031 default
57032 initial
57033 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57034 last-import
57035 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57036 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57037 running
57038 system/t6init
57039 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57040 default
57041 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57042 network/routing/ndp
57043 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57044 system/auditd
57045 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57046 default
57047 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57048 default
57049 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57050 initial
57051 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57052 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57053 last-import
57054 system/idmap
57055 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57056 running
57057 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57058 default
57059 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57060 start
57061 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57062 system/zones-monitoring
57063 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57064 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57065 default
57066 network/routing/legacy-routing
57067 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57068 ipv4
57069 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57070 system/zones
57071 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57072 initial
57073 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57074 default
57075 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57076 last-import
57077 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57078 running
57079 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57080 system/ipcc
57081 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57082 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57083 ipv6
57084 default
57085 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57086 initial
57087 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57088 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57089 system/update-man-index
57090 last-import
57091 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57092 running
57093 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57094 default
57095 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57096 system/process-security
57097 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57098 network/routing/ripng
57099 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57100 default
57101 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57102 default
57103 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57104 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57105 system/fm/notify-params
57106 initial
57107 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57108 last-import
57109 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57110 default
57111 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57112 running
57113 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57114 system/pkgserv
57115 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57116 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57117 default
57118 network/inetd
57119 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57120 default
57121 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57122 system/extended-accounting
57123 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57124 initial
57125 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57126 flow
57127 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57128 last-import
57129 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57130 net
57131 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57132 running
57133 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57134 process
57135 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57136 start
57137 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57138 task
57139 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57140 system/consadm
57141 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57142 network/bridge
57143 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57144 default
57145 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57146 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57147 network/ipv4-forwarding
57148 system/scheduler
57149 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57150 default
57151 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57152 default
57153 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57154 initial
57155 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57156 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57157 last-import
57158 system/pfexec
57159 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57160 running
57161 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57162 default
57163 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57164 system/illumos/metadata
57165 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57166 network/ipv6-forwarding
57167 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57168 default
57169 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57170 default
57171 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57172 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57173 initial
57174 system/sar
57175 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57176 last-import
57177 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57178 default
57179 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57180 running
57181 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57182 system/early-manifest-import
57183 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57184 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57185 default
57186 network/ipqos
57187 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57188 default
57189 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57190 system/fcoe_target
57191 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57192 initial
57193 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57194 default
57195 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57196 last-import
57197 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57198 running
57199 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57200 system/device/mpxio-upgrade
57201 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57202 default
57203 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57204 network/ipsec/ipsecalgs
57205 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57206 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57207 system/device/audio
57208 default
57209 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57210 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57211 default
57212 initial
57213 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57214 last-import
57215 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57216 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57217 running
57218 system/device/allocate
57219 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57220 start
57221 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57222 default
57223 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57224 smf/legacy_run
57225 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57226 network/ipsec/policy
57227 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57228 default
57229 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57230 initial
57231 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57232 last-import
57233 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57234 running
57235 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57236 start
57237 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57238 network/ipsec/manual-key
57239 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57240 default
57241 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57242 initial
57243 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57244 last-import
57245 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57246 running
57247 Ok(Service { scf: Scf { handle: 0x68c050 }, service: 0x6b4950 })
57248 site/buildomat/agent
57249 Ok(Instance { scf: Scf { handle: 0x68c050 }, instance: 0x6b4710 })
57250 default
57251 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57252 network/ipsec/ike
57253 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57254 default
57255 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57256 initial
57257 test tests::instance_iter ... ok
57258 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57259 last-import
57260 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57261 running
57262 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57263 network/install
57264 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57265 default
57266 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57267 initial
57268 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57269 last-import
57270 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57271 running
57272 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57273 network/shares/group
57274 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57275 default
57276 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57277 initial
57278 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57279 last-import
57280 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57281 running
57282 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57283 start
57284 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57285 zfs
57286 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57287 running
57288 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57289 start
57290 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57291 system/filesystem/reparse
57292 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57293 default
57294 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57295 initial
57296 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57297 last-import
57298 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57299 running
57300 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57301 network/rpc/smserver
57302 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57303 default
57304 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57305 initial
57306 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57307 last-import
57308 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57309 running
57310 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57311 start
57312 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57313 network/rpc/keyserv
57314 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57315 default
57316 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57317 initial
57318 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57319 last-import
57320 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57321 running
57322 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57323 network/rpc/gss
57324 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57325 default
57326 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57327 initial
57328 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57329 last-import
57330 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57331 running
57332 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57333 start
57334 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57335 network/ipfilter
57336 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57337 default
57338 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57339 initial
57340 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57341 last-import
57342 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57343 running
57344 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57345 network/ldap/client
57346 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57347 default
57348 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57349 initial
57350 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57351 last-import
57352 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57353 running
57354 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57355 network/smb/client
57356 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57357 default
57358 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57359 initial
57360 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57361 last-import
57362 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57363 running
57364 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57365 network/smb/server
57366 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57367 default
57368 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57369 initial
57370 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57371 last-import
57372 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57373 running
57374 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57375 network/routing-setup
57376 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57377 default
57378 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57379 initial
57380 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57381 last-import
57382 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57383 running
57384 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57385 start
57386 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57387 network/npiv_config
57388 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57389 default
57390 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57391 initial
57392 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57393 last-import
57394 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57395 running
57396 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57397 start
57398 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57399 system/device/fc-fabric
57400 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57401 default
57402 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57403 initial
57404 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57405 last-import
57406 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57407 previous
57408 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57409 running
57410 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57411 start
57412 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57413 network/ssh
57414 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57415 default
57416 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57417 initial
57418 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57419 last-import
57420 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57421 running
57422 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57423 start
57424 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57425 network/varpd
57426 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57427 default
57428 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57429 initial
57430 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57431 last-import
57432 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57433 running
57434 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57435 network/security/kadmin
57436 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57437 default
57438 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57439 initial
57440 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57441 last-import
57442 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57443 running
57444 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57445 network/security/krb5kdc
57446 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57447 default
57448 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57449 initial
57450 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57451 last-import
57452 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57453 running
57454 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57455 network/security/ktkt_warn
57456 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57457 default
57458 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57459 initial
57460 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57461 last-import
57462 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57463 running
57464 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57465 start
57466 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57467 network/device-discovery/printers
57468 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57469 snmp
57470 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57471 initial
57472 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57473 last-import
57474 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57475 running
57476 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57477 network/service
57478 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57479 default
57480 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57481 initial
57482 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57483 last-import
57484 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57485 running
57486 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57487 start
57488 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57489 network/nis/client
57490 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57491 default
57492 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57493 initial
57494 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57495 last-import
57496 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57497 running
57498 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57499 network/location
57500 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57501 default
57502 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57503 initial
57504 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57505 last-import
57506 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57507 running
57508 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57509 network/dns/client
57510 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57511 default
57512 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57513 initial
57514 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57515 last-import
57516 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57517 previous
57518 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57519 running
57520 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57521 start
57522 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57523 system/name-service-cache
57524 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57525 default
57526 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57527 initial
57528 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57529 last-import
57530 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57531 previous
57532 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57533 running
57534 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57535 start
57536 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57537 network/nfs/mapid
57538 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57539 default
57540 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57541 initial
57542 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57543 last-import
57544 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57545 previous
57546 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57547 running
57548 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57549 network/chrony
57550 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57551 default
57552 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57553 initial
57554 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57555 last-import
57556 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57557 running
57558 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57559 network/ibd-post-upgrade
57560 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57561 default
57562 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57563 initial
57564 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57565 last-import
57566 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57567 running
57568 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57569 start
57570 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57571 network/tftp/udp6
57572 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57573 default
57574 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57575 initial
57576 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57577 last-import
57578 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57579 running
57580 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57581 network/netmask
57582 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57583 default
57584 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57585 initial
57586 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57587 last-import
57588 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57589 running
57590 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57591 start
57592 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57593 network/dns/multicast
57594 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57595 default
57596 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57597 initial
57598 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57599 last-import
57600 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57601 running
57602 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57603 network/dns/install
57604 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57605 default
57606 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57607 initial
57608 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57609 last-import
57610 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57611 running
57612 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57613 network/nfs/log
57614 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57615 default
57616 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57617 initial
57618 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57619 last-import
57620 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57621 running
57622 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57623 network/nfs/rquota
57624 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57625 default
57626 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57627 initial
57628 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57629 last-import
57630 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57631 running
57632 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57633 network/nfs/client
57634 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57635 default
57636 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57637 initial
57638 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57639 last-import
57640 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57641 running
57642 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57643 network/nfs/server
57644 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57645 default
57646 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57647 initial
57648 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57649 last-import
57650 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57651 running
57652 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57653 network/nfs/cbd
57654 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57655 default
57656 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57657 initial
57658 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57659 last-import
57660 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57661 running
57662 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57663 network/nfs/status
57664 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57665 default
57666 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57667 initial
57668 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57669 last-import
57670 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57671 running
57672 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57673 network/nfs/nlockmgr
57674 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57675 default
57676 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57677 initial
57678 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57679 last-import
57680 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57681 running
57682 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57683 platform/i86pc/acpihpd
57684 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57685 default
57686 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57687 initial
57688 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57689 last-import
57690 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57691 running
57692 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57693 start
57694 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57695 system/sac
57696 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57697 default
57698 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57699 initial
57700 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57701 last-import
57702 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57703 running
57704 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57705 start
57706 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57707 system/fcoe_initiator
57708 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57709 default
57710 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57711 initial
57712 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57713 last-import
57714 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57715 running
57716 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57717 start
57718 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57719 system/fmd
57720 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57721 default
57722 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57723 initial
57724 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57725 last-import
57726 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57727 running
57728 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57729 start
57730 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57731 system/sysevent
57732 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57733 default
57734 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57735 initial
57736 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57737 last-import
57738 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57739 running
57740 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57741 start
57742 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57743 system/boot-config
57744 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57745 default
57746 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57747 initial
57748 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57749 last-import
57750 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57751 running
57752 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57753 start
57754 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57755 system/picl
57756 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57757 default
57758 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57759 initial
57760 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57761 last-import
57762 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57763 running
57764 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57765 start
57766 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57767 system/coreadm
57768 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57769 default
57770 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57771 initial
57772 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57773 last-import
57774 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57775 running
57776 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57777 start
57778 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57779 system/hal
57780 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57781 default
57782 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57783 initial
57784 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57785 last-import
57786 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57787 running
57788 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57789 start
57790 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57791 system/resource-mgmt
57792 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57793 default
57794 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57795 initial
57796 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57797 last-import
57798 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57799 running
57800 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57801 start
57802 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57803 system/rcap
57804 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57805 system/system-log
57806 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57807 default
57808 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57809 initial
57810 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57811 last-import
57812 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57813 previous
57814 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57815 running
57816 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57817 start
57818 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57819 rsyslog
57820 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57821 initial
57822 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57823 last-import
57824 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57825 previous
57826 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57827 running
57828 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57829 system/dumpadm
57830 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57831 default
57832 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57833 initial
57834 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57835 last-import
57836 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57837 running
57838 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57839 start
57840 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57841 system/dbus
57842 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57843 default
57844 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57845 initial
57846 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57847 last-import
57848 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57849 running
57850 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57851 start
57852 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57853 system/pools
57854 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57855 default
57856 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57857 initial
57858 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57859 last-import
57860 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57861 running
57862 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57863 system/power
57864 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57865 default
57866 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57867 initial
57868 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57869 last-import
57870 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57871 running
57872 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57873 start
57874 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57875 system/keymap
57876 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57877 default
57878 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57879 initial
57880 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57881 last-import
57882 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57883 running
57884 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57885 start
57886 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57887 system/auditset
57888 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57889 default
57890 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57891 initial
57892 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57893 last-import
57894 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57895 running
57896 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57897 start
57898 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57899 system/stmf
57900 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57901 default
57902 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57903 initial
57904 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57905 last-import
57906 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57907 running
57908 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57909 system/hotplug
57910 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57911 default
57912 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57913 initial
57914 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57915 last-import
57916 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57917 running
57918 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57919 system/rbac
57920 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57921 default
57922 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57923 initial
57924 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57925 last-import
57926 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57927 running
57928 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57929 start
57930 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57931 system/logadm-upgrade
57932 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57933 default
57934 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57935 initial
57936 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57937 last-import
57938 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57939 running
57940 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57941 start
57942 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57943 system/hostid
57944 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57945 default
57946 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57947 initial
57948 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57949 last-import
57950 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57951 running
57952 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57953 start
57954 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57955 system/filesystem/autofs
57956 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57957 default
57958 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57959 initial
57960 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57961 last-import
57962 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57963 running
57964 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57965 start
57966 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57967 system/cron
57968 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57969 default
57970 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57971 initial
57972 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57973 last-import
57974 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57975 running
57976 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57977 start
57978 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57979 system/illumos/userscript
57980 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57981 default
57982 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57983 initial
57984 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57985 last-import
57986 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57987 running
57988 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57989 start
57990 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
57991 system/vtdaemon
57992 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
57993 default
57994 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57995 initial
57996 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57997 last-import
57998 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
57999 running
58000 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58001 system/boot-archive-update
58002 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58003 default
58004 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58005 initial
58006 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58007 last-import
58008 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58009 running
58010 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58011 start
58012 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58013 system/cryptosvc
58014 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58015 default
58016 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58017 initial
58018 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58019 last-import
58020 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58021 running
58022 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58023 start
58024 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58025 system/intrd
58026 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58027 default
58028 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58029 initial
58030 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58031 last-import
58032 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58033 running
58034 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58035 start
58036 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58037 system/rmtmpfiles
58038 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58039 default
58040 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58041 initial
58042 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58043 last-import
58044 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58045 running
58046 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58047 start
58048 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58049 system/t6init
58050 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58051 default
58052 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58053 initial
58054 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58055 last-import
58056 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58057 running
58058 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58059 system/auditd
58060 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58061 default
58062 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58063 initial
58064 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58065 last-import
58066 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58067 running
58068 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58069 system/idmap
58070 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58071 default
58072 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58073 initial
58074 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58075 last-import
58076 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58077 running
58078 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58079 system/zones-monitoring
58080 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58081 default
58082 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58083 initial
58084 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58085 last-import
58086 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58087 running
58088 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58089 start
58090 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58091 system/zones
58092 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58093 default
58094 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58095 initial
58096 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58097 last-import
58098 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58099 previous
58100 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58101 running
58102 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58103 start
58104 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58105 system/ipcc
58106 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58107 default
58108 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58109 initial
58110 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58111 last-import
58112 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58113 running
58114 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58115 start
58116 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58117 system/update-man-index
58118 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58119 default
58120 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58121 initial
58122 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58123 last-import
58124 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58125 running
58126 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58127 start
58128 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58129 system/process-security
58130 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58131 default
58132 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58133 initial
58134 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58135 last-import
58136 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58137 running
58138 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58139 system/fm/notify-params
58140 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58141 default
58142 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58143 initial
58144 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58145 last-import
58146 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58147 running
58148 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58149 system/pkgserv
58150 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58151 default
58152 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58153 initial
58154 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58155 last-import
58156 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58157 running
58158 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58159 start
58160 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58161 system/extended-accounting
58162 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58163 flow
58164 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58165 initial
58166 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58167 last-import
58168 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58169 running
58170 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58171 net
58172 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58173 initial
58174 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58175 last-import
58176 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58177 running
58178 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58179 process
58180 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58181 initial
58182 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58183 last-import
58184 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58185 running
58186 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58187 task
58188 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58189 initial
58190 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58191 last-import
58192 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58193 running
58194 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58195 system/consadm
58196 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58197 default
58198 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58199 initial
58200 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58201 last-import
58202 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58203 running
58204 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58205 system/scheduler
58206 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58207 default
58208 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58209 initial
58210 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58211 last-import
58212 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58213 running
58214 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58215 start
58216 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58217 system/pfexec
58218 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58219 default
58220 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58221 initial
58222 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58223 last-import
58224 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58225 running
58226 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58227 start
58228 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58229 system/illumos/metadata
58230 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58231 default
58232 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58233 initial
58234 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58235 last-import
58236 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58237 running
58238 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58239 start
58240 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58241 system/sar
58242 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58243 default
58244 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58245 initial
58246 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58247 last-import
58248 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58249 running
58250 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58251 system/early-manifest-import
58252 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58253 default
58254 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58255 initial
58256 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58257 last-import
58258 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58259 running
58260 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58261 start
58262 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58263 system/fcoe_target
58264 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58265 default
58266 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58267 initial
58268 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58269 last-import
58270 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58271 running
58272 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58273 system/device/mpxio-upgrade
58274 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58275 default
58276 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58277 initial
58278 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58279 last-import
58280 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58281 running
58282 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58283 system/device/audio
58284 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58285 default
58286 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58287 initial
58288 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58289 last-import
58290 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58291 running
58292 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58293 start
58294 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58295 system/device/allocate
58296 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58297 default
58298 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58299 initial
58300 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58301 last-import
58302 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58303 running
58304 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58305 smf/legacy_run
58306 Ok(Service { scf: Scf { handle: 0x6b7a90 }, service: 0x6b47d0 })
58307 site/buildomat/agent
58308 Ok(Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 })
58309 default
58310 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58311 last-import
58312 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58313 running
58314 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58315 initial
58316 Instance { scf: Scf { handle: 0x6b7a90 }, instance: 0x6b46d0 }
58317 start
58318 test tests::snapshot_iter ... ok
58319 
58320 test result: ok. 10 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.11s
58321 
58322 Running `/work/oxidecomputer/crucible/target/debug/deps/crudd-3e9b00990c25260e --nocapture`
58323 
58324 running 0 tests
58325 
58326 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
58327 
58328 Running `/work/oxidecomputer/crucible/target/debug/deps/crutest-af78e92d646e2d06 --nocapture`
58329 
58330 running 64 tests
58331 test protocol::tests::correctly_detect_truncated_message ... ok
58332 test protocol::tests::rt_commit ... ok
58333 test protocol::tests::rt_deactivate ... ok
58334 test protocol::tests::rt_activate ... ok
58335 test protocol::tests::rt_done_ok ... ok
58336 test protocol::tests::rt_generic ... ok
58337 test protocol::tests::rt_info_please ... ok
58338 test protocol::tests::rt_info ... ok
58339 test protocol::tests::rt_is_active ... ok
58340 test protocol::tests::rt_is_show ... ok
58341 test protocol::tests::rt_my_uuid ... ok
58342 test protocol::tests::rt_perf ... ok
58343 test protocol::tests::rt_read ... ok
58344 test protocol::tests::rt_uuid ... ok
58345 test protocol::tests::rt_replace ... ok
58346 test protocol::tests::rt_write ... ok
58347 test protocol::tests::rt_write_unwritten ... ok
58348 test test::test_95_2 ... ok
58349 test test::test_95_20 ... ok
58350 test test::test_95_21 ... ok
58351 test test::test_95_10 ... ok
58352 test test::test_perc_bad_big_perc ... ok
58353 test test::test_perc_bad_perc ... ok
58354 test test::test_95_small ... ok
58355 test test::test_perc_mixed ... ok
58356 test test::test_read_compare_empty ... ok
58357 Mismatch Block::0 bo:1 Volume offset:1 Expected:1 Got:2
58358 test test::test_read_compare_1 ... ok
58359 Mismatch Block::0 bo:2 Volume offset:2 Expected:1 Got:2
58360 Mismatch Block::0 bo:3 Volume offset:3 Expected:1 Got:2
58361 Mismatch Block::0 bo:1 Volume offset:1 Expected:3 Got:2
58362 test test::test_read_compare_commit ... okMismatch Block::0 bo:2 Volume offset:2 Expected:3 Got:2
58363 Mismatch Block::0 bo:4 Volume offset:4 Expected:1 Got:2
58364 
58365 Mismatch Block::0 bo:3 Volume offset:3 Expected:3 Got:2
58366 Mismatch Block::0 bo:5 Volume offset:5 Expected:1 Got:2
58367 Mismatch Block::0 bo:4 Volume offset:4 Expected:3 Got:2
58368 Mismatch Block::0 bo:6 Volume offset:6 Expected:1 Got:2
58369 Mismatch Block::0 bo:5 Volume offset:5 Expected:3 Got:2
58370 Mismatch Block::0 bo:7 Volume offset:7 Expected:1 Got:2
58371 Mismatch Block::0 bo:6 Volume offset:6 Expected:3 Got:2
58372 Mismatch Block::0 bo:8 Volume offset:8 Expected:1 Got:2
58373 Mismatch Block::0 bo:7 Volume offset:7 Expected:3 Got:2
58374 Mismatch Block::0 bo:9 Volume offset:9 Expected:1 Got:2
58375 Mismatch Block::0 bo:8 Volume offset:8 Expected:3 Got:2
58376 Mismatch Block::0 bo:10 Volume offset:10 Expected:1 Got:2
58377 Mismatch Block::0 bo:9 Volume offset:9 Expected:3 Got:2
58378 Mismatch Block::0 bo:11 Volume offset:11 Expected:1 Got:2
58379 Mismatch Block::0 bo:10 Volume offset:10 Expected:3 Got:2
58380 Mismatch Block::0 bo:12 Volume offset:12 Expected:1 Got:2
58381 test test::test_read_compare ... Mismatch Block::0 bo:11 Volume offset:11 Expected:3 Got:2
58382 Mismatch Block::0 bo:12 Volume offset:12 Expected:3 Got:2
58383 ok
58384 Mismatch Block::0 bo:13 Volume offset:13 Expected:3 Got:2
58385 Mismatch Block::0 bo:13 Volume offset:13 Expected:1 Got:2
58386 Mismatch Block::0 bo:14 Volume offset:14 Expected:3 Got:2
58387 Mismatch Block::0 bo:14 Volume offset:14 Expected:1 Got:2
58388 Mismatch Block::0 bo:15 Volume offset:15 Expected:3 Got:2
58389 Mismatch Block::0 bo:15 Volume offset:15 Expected:1 Got:2
58390 Mismatch Block::0 bo:16 Volume offset:16 Expected:3 Got:2
58391 Mismatch Block::0 bo:16 Volume offset:16 Expected:1 Got:2
58392 Mismatch Block::0 bo:17 Volume offset:17 Expected:3 Got:2
58393 Mismatch Block::0 bo:17 Volume offset:17 Expected:1 Got:2
58394 Mismatch Block::0 bo:18 Volume offset:18 Expected:3 Got:2
58395 Mismatch Block::0 bo:18 Volume offset:18 Expected:1 Got:2
58396 Mismatch Block::0 bo:19 Volume offset:19 Expected:3 Got:2
58397 Mismatch Block::0 bo:19 Volume offset:19 Expected:1 Got:2
58398 Mismatch Block::0 bo:20 Volume offset:20 Expected:3 Got:2
58399 Mismatch Block::0 bo:20 Volume offset:20 Expected:1 Got:2
58400 test test::test_read_compare_empty_data ... ok
58401 Mismatch Block::0 bo:21 Volume offset:21 Expected:1 Got:2
58402 Mismatch Block::0 bo:22 Volume offset:22 Expected:1 Got:2
58403 Mismatch Block::0 bo:23 Volume offset:23 Expected:1 Got:2
58404 Mismatch Block::0 bo:24 Volume offset:24 Expected:1 Got:2
58405 Mismatch Block::0 bo:21 Volume offset:21 Expected:3 Got:2
58406 Mismatch Block::0 bo:25 Volume offset:25 Expected:1 Got:2
58407 Mismatch Block::0 bo:22 Volume offset:22 Expected:3 Got:2
58408 Mismatch Block::2 bo:1 Volume offset:1025 Expected:2 Got:9
58409 Mismatch Block::0 bo:23 Volume offset:23 Expected:3 Got:2
58410 Mismatch Block::0 bo:24 Volume offset:24 Expected:3 Got:2
58411 Mismatch Block::0 bo:25 Volume offset:25 Expected:3 Got:2
58412 Mismatch Block::0 bo:26 Volume offset:26 Expected:3 Got:2
58413 Mismatch Block::0 bo:27 Volume offset:27 Expected:3 Got:2
58414 Mismatch Block::0 bo:28 Volume offset:28 Expected:3 Got:2
58415 Mismatch Block::0 bo:29 Volume offset:29 Expected:3 Got:2
58416 SPEC v:2 min_av:254 cur_av:1 cm:1022 cc:1025
58417 Mismatch Block::0 bo:30 Volume offset:30 Expected:3 Got:2
58418 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
58419 Mismatch Block::0 bo:31 Volume offset:31 Expected:3 Got:2
58420 new cur is 1022 from min
58421 Mismatch Block::0 bo:32 Volume offset:32 Expected:3 Got:2
58422 SPEC v:255 min_av:254 cur_av:1 cm:1022 cc:1025
58423 Mismatch Block::0 bo:33 Volume offset:33 Expected:3 Got:2
58424 new cur is 1023 from min
58425 Mismatch Block::0 bo:34 Volume offset:34 Expected:3 Got:2
58426 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
58427 Mismatch Block::0 bo:35 Volume offset:35 Expected:3 Got:2
58428 new cur is 1024 from cur
58429 Mismatch Block::0 bo:36 Volume offset:36 Expected:3 Got:2
58430 SPEC v:1 min_av:254 cur_av:1 cm:1022 cc:1025
58431 test test::test_read_compare_span ... Mismatch Block::0 bo:37 Volume offset:37 Expected:3 Got:2
58432 new cur is 1025 from cur
58433 ok
58434 Mismatch Block::0 bo:38 Volume offset:38 Expected:3 Got:2
58435 Mismatch Block::0 bo:39 Volume offset:39 Expected:3 Got:2
58436 Mismatch Block::0 bo:40 Volume offset:40 Expected:3 Got:2
58437 Mismatch Block::0 bo:41 Volume offset:41 Expected:3 Got:2
58438 Mismatch Block::0 bo:42 Volume offset:42 Expected:3 Got:2
58439 test test::test_read_compare_span_fail ... Mismatch Block::0 bo:43 Volume offset:43 Expected:3 Got:2
58440 okMismatch Block::0 bo:44 Volume offset:44 Expected:3 Got:2
58441 Mismatch Block::0 bo:45 Volume offset:45 Expected:3 Got:2
58442 Mismatch Block::0 bo:46 Volume offset:46 Expected:3 Got:2
58443 Mismatch Block::0 bo:47 Volume offset:47 Expected:3 Got:2
58444 Mismatch Block::0 bo:48 Volume offset:48 Expected:3 Got:2
58445 Mismatch Block::0 bo:49 Volume offset:49 Expected:3 Got:2
58446 Mismatch Block::0 bo:50 Volume offset:50 Expected:3 Got:2
58447 Mismatch Block::0 bo:51 Volume offset:51 Expected:3 Got:2
58448 Mismatch Block::0 bo:52 Volume offset:52 Expected:3 Got:2
58449 Mismatch Block::0 bo:53 Volume offset:53 Expected:3 Got:2
58450 Mismatch Block::0 bo:54 Volume offset:54 Expected:3 Got:2
58451 Mismatch Block::0 bo:55 Volume offset:55 Expected:3 Got:2
58452 Mismatch Block::0 bo:56 Volume offset:56 Expected:3 Got:2
58453 Mismatch Block::0 bo:57 Volume offset:57 Expected:3 Got:2
58454 Mismatch Block::0 bo:58 Volume offset:58 Expected:3 Got:2
58455 Mismatch Block::0 bo:59 Volume offset:59 Expected:3 Got:2
58456 Mismatch Block::0 bo:60 Volume offset:60 Expected:3 Got:2
58457 Mismatch Block::0 bo:61 Volume offset:61 Expected:3 Got:2
58458 Mismatch Block::0 bo:62 Volume offset:62 Expected:3 Got:2
58459 
58460 Mismatch Block::0 bo:63 Volume offset:63 Expected:3 Got:2
58461 Mismatch Block::0 bo:64 Volume offset:64 Expected:3 Got:2
58462 Mismatch Block::0 bo:65 Volume offset:65 Expected:3 Got:2
58463 Mismatch Block::0 bo:66 Volume offset:66 Expected:3 Got:2
58464 Mismatch Block::0 bo:67 Volume offset:67 Expected:3 Got:2
58465 Mismatch Block::0 bo:68 Volume offset:68 Expected:3 Got:2
58466 Mismatch Block::0 bo:69 Volume offset:69 Expected:3 Got:2
58467 Mismatch Block::0 bo:70 Volume offset:70 Expected:3 Got:2
58468 Mismatch Block::0 bo:71 Volume offset:71 Expected:3 Got:2
58469 test test::test_wl_commit_1024_range_no_update_above_rollover ... ok
58470 Mismatch Block::0 bo:72 Volume offset:72 Expected:3 Got:2
58471 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
58472 SPEC v:252 min_av:253 cur_av:0 cm:1021 cc:1024
58473 SPEC v:253 min_av:253 cur_av:0 cm:1021 cc:1024
58474 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
58475 SPEC v:254 min_av:255 cur_av:1 cm:1023 cc:1025
58476 SPEC v:255 min_av:255 cur_av:1 cm:1023 cc:1025
58477 new cur is 1023 from min
58478 SPEC v:0 min_av:255 cur_av:1 cm:1023 cc:1025
58479 new cur is 1024 from cur
58480 SPEC v:1 min_av:255 cur_av:1 cm:1023 cc:1025
58481 new cur is 1025 from cur
58482 SPEC v:2 min_av:255 cur_av:1 cm:1023 cc:1025
58483 Mismatch Block::2 bo:2 Volume offset:1026 Expected:2 Got:9
58484 new cur is 1021 from min
58485 SPEC v:254 min_av:253 cur_av:0 cm:1021 cc:1024
58486 new cur is 1022 from min
58487 Mismatch Block::0 bo:73 Volume offset:73 Expected:3 Got:2
58488 SPEC v:255 min_av:253 cur_av:0 cm:1021 cc:1024
58489 new cur is 1023 from min
58490 SPEC v:0 min_av:253 cur_av:0 cm:1021 cc:1024
58491 Mismatch Block::0 bo:74 Volume offset:74 Expected:3 Got:2
58492 test test::test_wl_commit_1024_range_rollover_min_at ... Mismatch Block::0 bo:75 Volume offset:75 Expected:3 Got:2
58493 Mismatch Block::0 bo:76 Volume offset:76 Expected:3 Got:2
58494 Mismatch Block::0 bo:77 Volume offset:77 Expected:3 Got:2
58495 Mismatch Block::0 bo:78 Volume offset:78 Expected:3 Got:2
58496 Mismatch Block::0 bo:79 Volume offset:79 Expected:3 Got:2
58497 Mismatch Block::0 bo:80 Volume offset:80 Expected:3 Got:2
58498 Mismatch Block::0 bo:81 Volume offset:81 Expected:3 Got:2
58499 Mismatch Block::0 bo:82 Volume offset:82 Expected:3 Got:2
58500 Mismatch Block::0 bo:83 Volume offset:83 Expected:3 Got:2
58501 Mismatch Block::0 bo:84 Volume offset:84 Expected:3 Got:2
58502 Mismatch Block::0 bo:85 Volume offset:85 Expected:3 Got:2
58503 Mismatch Block::0 bo:86 Volume offset:86 Expected:3 Got:2
58504 new cur is 1024 from cur
58505 Mismatch Block::0 bo:26 Volume offset:26 Expected:1 Got:2
58506 Mismatch Block::0 bo:27 Volume offset:27 Expected:1 Got:2
58507 Mismatch Block::0 bo:28 Volume offset:28 Expected:1 Got:2
58508 Mismatch Block::0 bo:29 Volume offset:29 Expected:1 Got:2
58509 Mismatch Block::0 bo:30 Volume offset:30 Expected:1 Got:2
58510 Mismatch Block::0 bo:31 Volume offset:31 Expected:1 Got:2
58511 Mismatch Block::0 bo:32 Volume offset:32 Expected:1 Got:2
58512 Mismatch Block::0 bo:33 Volume offset:33 Expected:1 Got:2
58513 Mismatch Block::0 bo:34 Volume offset:34 Expected:1 Got:2
58514 Mismatch Block::0 bo:35 Volume offset:35 Expected:1 Got:2
58515 Mismatch Block::0 bo:36 Volume offset:36 Expected:1 Got:2
58516 Mismatch Block::0 bo:37 Volume offset:37 Expected:1 Got:2
58517 Mismatch Block::0 bo:38 Volume offset:38 Expected:1 Got:2
58518 Mismatch Block::0 bo:39 Volume offset:39 Expected:1 Got:2
58519 okMismatch Block::0 bo:40 Volume offset:40 Expected:1 Got:2
58520 
58521 Mismatch Block::0 bo:41 Volume offset:41 Expected:1 Got:2
58522 SPEC v:1 min_av:253 cur_av:0 cm:1021 cc:1024
58523 Mismatch Block::0 bo:87 Volume offset:87 Expected:3 Got:2
58524 Mismatch Block::0 bo:88 Volume offset:88 Expected:3 Got:2
58525 Mismatch Block::0 bo:89 Volume offset:89 Expected:3 Got:2
58526 Mismatch Block::0 bo:90 Volume offset:90 Expected:3 Got:2
58527 Mismatch Block::0 bo:91 Volume offset:91 Expected:3 Got:2
58528 Mismatch Block::0 bo:92 Volume offset:92 Expected:3 Got:2
58529 Mismatch Block::0 bo:93 Volume offset:93 Expected:3 Got:2
58530 Mismatch Block::0 bo:94 Volume offset:94 Expected:3 Got:2
58531 Mismatch Block::0 bo:95 Volume offset:95 Expected:3 Got:2
58532 Mismatch Block::0 bo:96 Volume offset:96 Expected:3 Got:2
58533 Mismatch Block::0 bo:97 Volume offset:97 Expected:3 Got:2
58534 Mismatch Block::0 bo:98 Volume offset:98 Expected:3 Got:2
58535 Mismatch Block::0 bo:99 Volume offset:99 Expected:3 Got:2
58536 Mismatch Block::0 bo:100 Volume offset:100 Expected:3 Got:2
58537 Mismatch Block::0 bo:42 Volume offset:42 Expected:1 Got:2
58538 Mismatch Block::0 bo:101 Volume offset:101 Expected:3 Got:2
58539 Mismatch Block::0 bo:43 Volume offset:43 Expected:1 Got:2
58540 Mismatch Block::0 bo:102 Volume offset:102 Expected:3 Got:2
58541 Mismatch Block::0 bo:44 Volume offset:44 Expected:1 Got:2
58542 Mismatch Block::0 bo:103 Volume offset:103 Expected:3 Got:2
58543 Mismatch Block::0 bo:45 Volume offset:45 Expected:1 Got:2
58544 Mismatch Block::0 bo:104 Volume offset:104 Expected:3 Got:2
58545 Mismatch Block::0 bo:46 Volume offset:46 Expected:1 Got:2
58546 Mismatch Block::0 bo:105 Volume offset:105 Expected:3 Got:2
58547 Mismatch Block::0 bo:47 Volume offset:47 Expected:1 Got:2
58548 Mismatch Block::0 bo:106 Volume offset:106 Expected:3 Got:2
58549 Mismatch Block::0 bo:48 Volume offset:48 Expected:1 Got:2
58550 Mismatch Block::0 bo:107 Volume offset:107 Expected:3 Got:2
58551 Mismatch Block::0 bo:49 Volume offset:49 Expected:1 Got:2
58552 Mismatch Block::0 bo:108 Volume offset:108 Expected:3 Got:2
58553 Mismatch Block::0 bo:50 Volume offset:50 Expected:1 Got:2
58554 Mismatch Block::0 bo:109 Volume offset:109 Expected:3 Got:2
58555 Mismatch Block::0 bo:51 Volume offset:51 Expected:1 Got:2
58556 Mismatch Block::0 bo:110 Volume offset:110 Expected:3 Got:2
58557 Mismatch Block::0 bo:52 Volume offset:52 Expected:1 Got:2
58558 Mismatch Block::0 bo:111 Volume offset:111 Expected:3 Got:2
58559 Mismatch Block::0 bo:53 Volume offset:53 Expected:1 Got:2
58560 Mismatch Block::0 bo:112 Volume offset:112 Expected:3 Got:2
58561 Mismatch Block::0 bo:54 Volume offset:54 Expected:1 Got:2
58562 Mismatch Block::0 bo:113 Volume offset:113 Expected:3 Got:2
58563 Mismatch Block::0 bo:55 Volume offset:55 Expected:1 Got:2
58564 Mismatch Block::0 bo:114 Volume offset:114 Expected:3 Got:2
58565 Mismatch Block::0 bo:56 Volume offset:56 Expected:1 Got:2
58566 Mismatch Block::0 bo:115 Volume offset:115 Expected:3 Got:2
58567 Mismatch Block::0 bo:57 Volume offset:57 Expected:1 Got:2
58568 Mismatch Block::0 bo:116 Volume offset:116 Expected:3 Got:2
58569 Mismatch Block::0 bo:58 Volume offset:58 Expected:1 Got:2
58570 test test::test_read_compare_span_fail_2 ... Mismatch Block::0 bo:117 Volume offset:117 Expected:3 Got:2
58571 ok
58572 Mismatch Block::0 bo:118 Volume offset:118 Expected:3 Got:2
58573 Mismatch Block::0 bo:59 Volume offset:59 Expected:1 Got:2
58574 Mismatch Block::0 bo:119 Volume offset:119 Expected:3 Got:2
58575 Mismatch Block::0 bo:60 Volume offset:60 Expected:1 Got:2
58576 Mismatch Block::0 bo:120 Volume offset:120 Expected:3 Got:2
58577 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
58578 test test::test_wl_commit_1024_range_rollover_max_at ... SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
58579 oknew cur is 1022 from min
58580 SPEC v:255 min_av:254 cur_av:1 cm:1022 cc:1025
58581 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
58582 new cur is 1023 from min
58583 Mismatch Block::0 bo:121 Volume offset:121 Expected:3 Got:2
58584 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
58585 Mismatch Block::0 bo:122 Volume offset:122 Expected:3 Got:2
58586 new cur is 1024 from cur
58587 Mismatch Block::0 bo:61 Volume offset:61 Expected:1 Got:2
58588 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
58589 SPEC v:1 min_av:254 cur_av:1 cm:1022 cc:1025
58590 new cur is 1025 from cur
58591 SPEC v:2 min_av:254 cur_av:1 cm:1022 cc:1025
58592 new cur is 1022 from min
58593 Mismatch Block::0 bo:62 Volume offset:62 Expected:1 Got:2
58594 SPEC v:255 min_av:254 cur_av:1 cm:1022 cc:1025
58595 Mismatch Block::0 bo:63 Volume offset:63 Expected:1 Got:2
58596 new cur is 1023 from min
58597 Mismatch Block::0 bo:64 Volume offset:64 Expected:1 Got:2
58598 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
58599 Mismatch Block::0 bo:65 Volume offset:65 Expected:1 Got:2
58600 new cur is 1024 from cur
58601 Mismatch Block::0 bo:66 Volume offset:66 Expected:1 Got:2
58602 SPEC v:1 min_av:254 cur_av:1 cm:1022 cc:1025
58603 Mismatch Block::0 bo:67 Volume offset:67 Expected:1 Got:2
58604 new cur is 1025 from cur
58605 Mismatch Block::0 bo:68 Volume offset:68 Expected:1 Got:2
58606 SPEC v:2 min_av:254 cur_av:1 cm:1022 cc:1025
58607 Mismatch Block::0 bo:69 Volume offset:69 Expected:1 Got:2
58608 Mismatch Block::0 bo:70 Volume offset:70 Expected:1 Got:2
58609 Mismatch Block::0 bo:123 Volume offset:123 Expected:3 Got:2
58610 Mismatch Block::0 bo:71 Volume offset:71 Expected:1 Got:2
58611 Mismatch Block::0 bo:72 Volume offset:72 Expected:1 Got:2
58612 Mismatch Block::0 bo:73 Volume offset:73 Expected:1 Got:2
58613 Mismatch Block::0 bo:124 Volume offset:124 Expected:3 Got:2
58614 Mismatch Block::0 bo:74 Volume offset:74 Expected:1 Got:2
58615 Mismatch Block::0 bo:125 Volume offset:125 Expected:3 Got:2
58616 Mismatch Block::0 bo:75 Volume offset:75 Expected:1 Got:2
58617 Mismatch Block::0 bo:126 Volume offset:126 Expected:3 Got:2
58618 Mismatch Block::0 bo:76 Volume offset:76 Expected:1 Got:2
58619 Mismatch Block::0 bo:127 Volume offset:127 Expected:3 Got:2
58620 Mismatch Block::0 bo:77 Volume offset:77 Expected:1 Got:2
58621 Mismatch Block::0 bo:128 Volume offset:128 Expected:3 Got:2
58622 Mismatch Block::0 bo:78 Volume offset:78 Expected:1 Got:2
58623 Mismatch Block::0 bo:129 Volume offset:129 Expected:3 Got:2
58624 Mismatch Block::0 bo:79 Volume offset:79 Expected:1 Got:2
58625 Mismatch Block::0 bo:130 Volume offset:130 Expected:3 Got:2
58626 Mismatch Block::0 bo:80 Volume offset:80 Expected:1 Got:2
58627 Mismatch Block::0 bo:131 Volume offset:131 Expected:3 Got:2
58628 Mismatch Block::0 bo:81 Volume offset:81 Expected:1 Got:2
58629 Mismatch Block::0 bo:132 Volume offset:132 Expected:3 Got:2
58630 Mismatch Block::0 bo:82 Volume offset:82 Expected:1 Got:2
58631 Mismatch Block::0 bo:133 Volume offset:133 Expected:3 Got:2
58632 Mismatch Block::0 bo:83 Volume offset:83 Expected:1 Got:2
58633 Mismatch Block::0 bo:134 Volume offset:134 Expected:3 Got:2
58634 Mismatch Block::0 bo:84 Volume offset:84 Expected:1 Got:2
58635 Mismatch Block::0 bo:135 Volume offset:135 Expected:3 Got:2
58636 Mismatch Block::0 bo:85 Volume offset:85 Expected:1 Got:2
58637 Mismatch Block::0 bo:136 Volume offset:136 Expected:3 Got:2
58638 Mismatch Block::0 bo:86 Volume offset:86 Expected:1 Got:2
58639 Mismatch Block::0 bo:137 Volume offset:137 Expected:3 Got:2
58640 Mismatch Block::0 bo:87 Volume offset:87 Expected:1 Got:2
58641 Mismatch Block::0 bo:138 Volume offset:138 Expected:3 Got:2
58642 Mismatch Block::0 bo:88 Volume offset:88 Expected:1 Got:2
58643 Mismatch Block::0 bo:139 Volume offset:139 Expected:3 Got:2
58644 Mismatch Block::0 bo:89 Volume offset:89 Expected:1 Got:2
58645 Mismatch Block::0 bo:140 Volume offset:140 Expected:3 Got:2
58646 Mismatch Block::0 bo:90 Volume offset:90 Expected:1 Got:2
58647 Mismatch Block::0 bo:141 Volume offset:141 Expected:3 Got:2
58648 Mismatch Block::0 bo:91 Volume offset:91 Expected:1 Got:2
58649 Mismatch Block::0 bo:142 Volume offset:142 Expected:3 Got:2
58650 Mismatch Block::0 bo:92 Volume offset:92 Expected:1 Got:2
58651 Mismatch Block::0 bo:143 Volume offset:143 Expected:3 Got:2
58652 Mismatch Block::0 bo:93 Volume offset:93 Expected:1 Got:2
58653 Mismatch Block::0 bo:144 Volume offset:144 Expected:3 Got:2
58654 Mismatch Block::0 bo:94 Volume offset:94 Expected:1 Got:2
58655 Mismatch Block::0 bo:145 Volume offset:145 Expected:3 Got:2
58656 Mismatch Block::0 bo:95 Volume offset:95 Expected:1 Got:2
58657 Mismatch Block::0 bo:146 Volume offset:146 Expected:3 Got:2
58658 Mismatch Block::0 bo:96 Volume offset:96 Expected:1 Got:2
58659 Mismatch Block::0 bo:147 Volume offset:147 Expected:3 Got:2
58660 Mismatch Block::0 bo:97 Volume offset:97 Expected:1 Got:2
58661 Mismatch Block::0 bo:148 Volume offset:148 Expected:3 Got:2
58662 Mismatch Block::0 bo:98 Volume offset:98 Expected:1 Got:2
58663 Mismatch Block::0 bo:149 Volume offset:149 Expected:3 Got:2
58664 Mismatch Block::0 bo:99 Volume offset:99 Expected:1 Got:2
58665 Mismatch Block::0 bo:150 Volume offset:150 Expected:3 Got:2
58666 Mismatch Block::0 bo:100 Volume offset:100 Expected:1 Got:2
58667 Mismatch Block::0 bo:151 Volume offset:151 Expected:3 Got:2
58668 Mismatch Block::0 bo:101 Volume offset:101 Expected:1 Got:2
58669 Mismatch Block::0 bo:152 Volume offset:152 Expected:3 Got:2
58670 Mismatch Block::0 bo:102 Volume offset:102 Expected:1 Got:2
58671 Mismatch Block::0 bo:153 Volume offset:153 Expected:3 Got:2
58672 Mismatch Block::0 bo:103 Volume offset:103 Expected:1 Got:2
58673 Mismatch Block::0 bo:154 Volume offset:154 Expected:3 Got:2
58674 Mismatch Block::0 bo:104 Volume offset:104 Expected:1 Got:2
58675 Mismatch Block::0 bo:155 Volume offset:155 Expected:3 Got:2
58676 Mismatch Block::0 bo:105 Volume offset:105 Expected:1 Got:2
58677 Mismatch Block::0 bo:156 Volume offset:156 Expected:3 Got:2
58678 Mismatch Block::0 bo:106 Volume offset:106 Expected:1 Got:2
58679 Mismatch Block::0 bo:157 Volume offset:157 Expected:3 Got:2
58680 Mismatch Block::0 bo:107 Volume offset:107 Expected:1 Got:2
58681 Mismatch Block::0 bo:158 Volume offset:158 Expected:3 Got:2
58682 Mismatch Block::0 bo:108 Volume offset:108 Expected:1 Got:2
58683 Mismatch Block::0 bo:159 Volume offset:159 Expected:3 Got:2
58684 Mismatch Block::0 bo:109 Volume offset:109 Expected:1 Got:2
58685 Mismatch Block::0 bo:160 Volume offset:160 Expected:3 Got:2
58686 Mismatch Block::0 bo:110 Volume offset:110 Expected:1 Got:2
58687 Mismatch Block::0 bo:161 Volume offset:161 Expected:3 Got:2
58688 Mismatch Block::0 bo:111 Volume offset:111 Expected:1 Got:2
58689 Mismatch Block::0 bo:162 Volume offset:162 Expected:3 Got:2
58690 Mismatch Block::0 bo:112 Volume offset:112 Expected:1 Got:2
58691 Mismatch Block::0 bo:163 Volume offset:163 Expected:3 Got:2
58692 Mismatch Block::0 bo:113 Volume offset:113 Expected:1 Got:2
58693 Mismatch Block::0 bo:164 Volume offset:164 Expected:3 Got:2
58694 Mismatch Block::0 bo:114 Volume offset:114 Expected:1 Got:2
58695 Mismatch Block::0 bo:165 Volume offset:165 Expected:3 Got:2
58696 Mismatch Block::0 bo:115 Volume offset:115 Expected:1 Got:2
58697 Mismatch Block::0 bo:166 Volume offset:166 Expected:3 Got:2
58698 Mismatch Block::0 bo:116 Volume offset:116 Expected:1 Got:2
58699 Mismatch Block::0 bo:167 Volume offset:167 Expected:3 Got:2
58700 Mismatch Block::0 bo:117 Volume offset:117 Expected:1 Got:2
58701 Mismatch Block::0 bo:168 Volume offset:168 Expected:3 Got:2
58702 Mismatch Block::0 bo:118 Volume offset:118 Expected:1 Got:2
58703 Mismatch Block::0 bo:169 Volume offset:169 Expected:3 Got:2
58704 Mismatch Block::0 bo:119 Volume offset:119 Expected:1 Got:2
58705 Mismatch Block::0 bo:170 Volume offset:170 Expected:3 Got:2
58706 Mismatch Block::0 bo:120 Volume offset:120 Expected:1 Got:2
58707 Mismatch Block::0 bo:171 Volume offset:171 Expected:3 Got:2
58708 Mismatch Block::0 bo:121 Volume offset:121 Expected:1 Got:2
58709 Mismatch Block::0 bo:172 Volume offset:172 Expected:3 Got:2
58710 Mismatch Block::0 bo:122 Volume offset:122 Expected:1 Got:2
58711 Mismatch Block::0 bo:173 Volume offset:173 Expected:3 Got:2
58712 Mismatch Block::0 bo:123 Volume offset:123 Expected:1 Got:2
58713 Mismatch Block::0 bo:174 Volume offset:174 Expected:3 Got:2
58714 Mismatch Block::0 bo:124 Volume offset:124 Expected:1 Got:2
58715 Mismatch Block::0 bo:175 Volume offset:175 Expected:3 Got:2
58716 Mismatch Block::0 bo:125 Volume offset:125 Expected:1 Got:2
58717 Mismatch Block::0 bo:176 Volume offset:176 Expected:3 Got:2
58718 Mismatch Block::0 bo:126 Volume offset:126 Expected:1 Got:2
58719 Mismatch Block::0 bo:177 Volume offset:177 Expected:3 Got:2
58720 Mismatch Block::0 bo:127 Volume offset:127 Expected:1 Got:2
58721 Mismatch Block::0 bo:178 Volume offset:178 Expected:3 Got:2
58722 Mismatch Block::0 bo:128 Volume offset:128 Expected:1 Got:2
58723 Mismatch Block::0 bo:179 Volume offset:179 Expected:3 Got:2
58724 Mismatch Block::0 bo:129 Volume offset:129 Expected:1 Got:2
58725 Mismatch Block::0 bo:180 Volume offset:180 Expected:3 Got:2
58726 Mismatch Block::0 bo:130 Volume offset:130 Expected:1 Got:2
58727 Mismatch Block::0 bo:181 Volume offset:181 Expected:3 Got:2
58728 Mismatch Block::0 bo:131 Volume offset:131 Expected:1 Got:2
58729 Mismatch Block::0 bo:182 Volume offset:182 Expected:3 Got:2
58730 Mismatch Block::0 bo:132 Volume offset:132 Expected:1 Got:2
58731 Mismatch Block::0 bo:183 Volume offset:183 Expected:3 Got:2
58732 Mismatch Block::0 bo:133 Volume offset:133 Expected:1 Got:2
58733 Mismatch Block::0 bo:184 Volume offset:184 Expected:3 Got:2
58734 Mismatch Block::0 bo:134 Volume offset:134 Expected:1 Got:2
58735 Mismatch Block::0 bo:185 Volume offset:185 Expected:3 Got:2
58736 Mismatch Block::0 bo:135 Volume offset:135 Expected:1 Got:2
58737 Mismatch Block::0 bo:186 Volume offset:186 Expected:3 Got:2
58738 Mismatch Block::0 bo:136 Volume offset:136 Expected:1 Got:2
58739 Mismatch Block::0 bo:187 Volume offset:187 Expected:3 Got:2
58740 Mismatch Block::0 bo:137 Volume offset:137 Expected:1 Got:2
58741 Mismatch Block::0 bo:188 Volume offset:188 Expected:3 Got:2
58742 Mismatch Block::0 bo:138 Volume offset:138 Expected:1 Got:2
58743 Mismatch Block::0 bo:189 Volume offset:189 Expected:3 Got:2
58744 Mismatch Block::0 bo:139 Volume offset:139 Expected:1 Got:2
58745 Mismatch Block::0 bo:190 Volume offset:190 Expected:3 Got:2
58746 Mismatch Block::0 bo:140 Volume offset:140 Expected:1 Got:2
58747 Mismatch Block::0 bo:191 Volume offset:191 Expected:3 Got:2
58748 Mismatch Block::0 bo:141 Volume offset:141 Expected:1 Got:2
58749 Mismatch Block::0 bo:192 Volume offset:192 Expected:3 Got:2
58750 Mismatch Block::0 bo:142 Volume offset:142 Expected:1 Got:2
58751 Mismatch Block::0 bo:193 Volume offset:193 Expected:3 Got:2
58752 Mismatch Block::0 bo:143 Volume offset:143 Expected:1 Got:2
58753 Mismatch Block::0 bo:194 Volume offset:194 Expected:3 Got:2
58754 Mismatch Block::0 bo:144 Volume offset:144 Expected:1 Got:2
58755 Mismatch Block::0 bo:195 Volume offset:195 Expected:3 Got:2
58756 Mismatch Block::0 bo:145 Volume offset:145 Expected:1 Got:2
58757 Mismatch Block::0 bo:196 Volume offset:196 Expected:3 Got:2
58758 Mismatch Block::0 bo:146 Volume offset:146 Expected:1 Got:2
58759 Mismatch Block::0 bo:197 Volume offset:197 Expected:3 Got:2
58760 Mismatch Block::0 bo:147 Volume offset:147 Expected:1 Got:2
58761 Mismatch Block::0 bo:198 Volume offset:198 Expected:3 Got:2
58762 Mismatch Block::0 bo:148 Volume offset:148 Expected:1 Got:2
58763 Mismatch Block::0 bo:199 Volume offset:199 Expected:3 Got:2
58764 Mismatch Block::0 bo:149 Volume offset:149 Expected:1 Got:2
58765 Mismatch Block::0 bo:200 Volume offset:200 Expected:3 Got:2
58766 Mismatch Block::0 bo:150 Volume offset:150 Expected:1 Got:2
58767 Mismatch Block::0 bo:201 Volume offset:201 Expected:3 Got:2
58768 Mismatch Block::0 bo:151 Volume offset:151 Expected:1 Got:2
58769 Mismatch Block::0 bo:202 Volume offset:202 Expected:3 Got:2
58770 Mismatch Block::0 bo:152 Volume offset:152 Expected:1 Got:2
58771 Mismatch Block::0 bo:203 Volume offset:203 Expected:3 Got:2
58772 Mismatch Block::0 bo:153 Volume offset:153 Expected:1 Got:2
58773 Mismatch Block::0 bo:204 Volume offset:204 Expected:3 Got:2
58774 Mismatch Block::0 bo:154 Volume offset:154 Expected:1 Got:2
58775 Mismatch Block::0 bo:205 Volume offset:205 Expected:3 Got:2
58776 Mismatch Block::0 bo:155 Volume offset:155 Expected:1 Got:2
58777 Mismatch Block::0 bo:206 Volume offset:206 Expected:3 Got:2
58778 Mismatch Block::0 bo:156 Volume offset:156 Expected:1 Got:2
58779 Mismatch Block::0 bo:207 Volume offset:207 Expected:3 Got:2
58780 Mismatch Block::0 bo:157 Volume offset:157 Expected:1 Got:2
58781 Mismatch Block::0 bo:208 Volume offset:208 Expected:3 Got:2
58782 Mismatch Block::0 bo:158 Volume offset:158 Expected:1 Got:2
58783 Mismatch Block::0 bo:209 Volume offset:209 Expected:3 Got:2
58784 Mismatch Block::0 bo:159 Volume offset:159 Expected:1 Got:2
58785 Mismatch Block::0 bo:210 Volume offset:210 Expected:3 Got:2
58786 Mismatch Block::0 bo:160 Volume offset:160 Expected:1 Got:2
58787 Mismatch Block::0 bo:211 Volume offset:211 Expected:3 Got:2
58788 Mismatch Block::0 bo:161 Volume offset:161 Expected:1 Got:2
58789 Mismatch Block::0 bo:212 Volume offset:212 Expected:3 Got:2
58790 Mismatch Block::0 bo:162 Volume offset:162 Expected:1 Got:2
58791 Mismatch Block::0 bo:213 Volume offset:213 Expected:3 Got:2
58792 Mismatch Block::0 bo:163 Volume offset:163 Expected:1 Got:2
58793 Mismatch Block::0 bo:214 Volume offset:214 Expected:3 Got:2
58794 Mismatch Block::0 bo:164 Volume offset:164 Expected:1 Got:2
58795 Mismatch Block::0 bo:215 Volume offset:215 Expected:3 Got:2
58796 Mismatch Block::0 bo:165 Volume offset:165 Expected:1 Got:2
58797 Mismatch Block::0 bo:216 Volume offset:216 Expected:3 Got:2
58798 Mismatch Block::0 bo:166 Volume offset:166 Expected:1 Got:2
58799 Mismatch Block::0 bo:217 Volume offset:217 Expected:3 Got:2
58800 Mismatch Block::0 bo:167 Volume offset:167 Expected:1 Got:2
58801 Mismatch Block::0 bo:218 Volume offset:218 Expected:3 Got:2
58802 Mismatch Block::0 bo:168 Volume offset:168 Expected:1 Got:2
58803 Mismatch Block::0 bo:219 Volume offset:219 Expected:3 Got:2
58804 Mismatch Block::0 bo:169 Volume offset:169 Expected:1 Got:2
58805 Mismatch Block::0 bo:220 Volume offset:220 Expected:3 Got:2
58806 Mismatch Block::0 bo:170 Volume offset:170 Expected:1 Got:2
58807 Mismatch Block::0 bo:221 Volume offset:221 Expected:3 Got:2
58808 Mismatch Block::0 bo:171 Volume offset:171 Expected:1 Got:2
58809 Mismatch Block::0 bo:222 Volume offset:222 Expected:3 Got:2
58810 Mismatch Block::0 bo:172 Volume offset:172 Expected:1 Got:2
58811 Mismatch Block::0 bo:223 Volume offset:223 Expected:3 Got:2
58812 Mismatch Block::0 bo:173 Volume offset:173 Expected:1 Got:2
58813 Mismatch Block::0 bo:224 Volume offset:224 Expected:3 Got:2
58814 Mismatch Block::0 bo:174 Volume offset:174 Expected:1 Got:2
58815 Mismatch Block::0 bo:225 Volume offset:225 Expected:3 Got:2
58816 Mismatch Block::0 bo:175 Volume offset:175 Expected:1 Got:2
58817 Mismatch Block::0 bo:226 Volume offset:226 Expected:3 Got:2
58818 Mismatch Block::0 bo:176 Volume offset:176 Expected:1 Got:2
58819 Mismatch Block::0 bo:227 Volume offset:227 Expected:3 Got:2
58820 Mismatch Block::0 bo:177 Volume offset:177 Expected:1 Got:2
58821 Mismatch Block::0 bo:228 Volume offset:228 Expected:3 Got:2
58822 Mismatch Block::0 bo:178 Volume offset:178 Expected:1 Got:2
58823 Mismatch Block::0 bo:229 Volume offset:229 Expected:3 Got:2
58824 Mismatch Block::0 bo:179 Volume offset:179 Expected:1 Got:2
58825 Mismatch Block::0 bo:230 Volume offset:230 Expected:3 Got:2
58826 Mismatch Block::0 bo:180 Volume offset:180 Expected:1 Got:2
58827 Mismatch Block::0 bo:231 Volume offset:231 Expected:3 Got:2
58828 Mismatch Block::0 bo:181 Volume offset:181 Expected:1 Got:2
58829 Mismatch Block::0 bo:232 Volume offset:232 Expected:3 Got:2
58830 Mismatch Block::0 bo:182 Volume offset:182 Expected:1 Got:2
58831 Mismatch Block::0 bo:233 Volume offset:233 Expected:3 Got:2
58832 Mismatch Block::0 bo:183 Volume offset:183 Expected:1 Got:2
58833 Mismatch Block::0 bo:234 Volume offset:234 Expected:3 Got:2
58834 Mismatch Block::0 bo:184 Volume offset:184 Expected:1 Got:2
58835 Mismatch Block::0 bo:235 Volume offset:235 Expected:3 Got:2
58836 Mismatch Block::0 bo:185 Volume offset:185 Expected:1 Got:2
58837 Mismatch Block::0 bo:236 Volume offset:236 Expected:3 Got:2
58838 Mismatch Block::0 bo:186 Volume offset:186 Expected:1 Got:2
58839 Mismatch Block::0 bo:237 Volume offset:237 Expected:3 Got:2
58840 Mismatch Block::0 bo:187 Volume offset:187 Expected:1 Got:2
58841 Mismatch Block::0 bo:238 Volume offset:238 Expected:3 Got:2
58842 Mismatch Block::0 bo:188 Volume offset:188 Expected:1 Got:2
58843 Mismatch Block::0 bo:239 Volume offset:239 Expected:3 Got:2
58844 Mismatch Block::0 bo:189 Volume offset:189 Expected:1 Got:2
58845 Mismatch Block::0 bo:240 Volume offset:240 Expected:3 Got:2
58846 Mismatch Block::0 bo:190 Volume offset:190 Expected:1 Got:2
58847 Mismatch Block::0 bo:241 Volume offset:241 Expected:3 Got:2
58848 Mismatch Block::0 bo:191 Volume offset:191 Expected:1 Got:2
58849 Mismatch Block::0 bo:242 Volume offset:242 Expected:3 Got:2
58850 Mismatch Block::0 bo:192 Volume offset:192 Expected:1 Got:2
58851 Mismatch Block::0 bo:243 Volume offset:243 Expected:3 Got:2
58852 Mismatch Block::0 bo:193 Volume offset:193 Expected:1 Got:2
58853 Mismatch Block::0 bo:244 Volume offset:244 Expected:3 Got:2
58854 Mismatch Block::0 bo:194 Volume offset:194 Expected:1 Got:2
58855 Mismatch Block::0 bo:245 Volume offset:245 Expected:3 Got:2
58856 Mismatch Block::0 bo:195 Volume offset:195 Expected:1 Got:2
58857 Mismatch Block::0 bo:246 Volume offset:246 Expected:3 Got:2
58858 Mismatch Block::0 bo:196 Volume offset:196 Expected:1 Got:2
58859 Mismatch Block::0 bo:247 Volume offset:247 Expected:3 Got:2
58860 Mismatch Block::0 bo:197 Volume offset:197 Expected:1 Got:2
58861 Mismatch Block::0 bo:248 Volume offset:248 Expected:3 Got:2
58862 Mismatch Block::0 bo:198 Volume offset:198 Expected:1 Got:2
58863 Mismatch Block::0 bo:249 Volume offset:249 Expected:3 Got:2
58864 Mismatch Block::0 bo:199 Volume offset:199 Expected:1 Got:2
58865 Mismatch Block::0 bo:250 Volume offset:250 Expected:3 Got:2
58866 Mismatch Block::0 bo:200 Volume offset:200 Expected:1 Got:2
58867 Mismatch Block::0 bo:251 Volume offset:251 Expected:3 Got:2
58868 Mismatch Block::0 bo:201 Volume offset:201 Expected:1 Got:2
58869 Mismatch Block::0 bo:252 Volume offset:252 Expected:3 Got:2
58870 Mismatch Block::0 bo:202 Volume offset:202 Expected:1 Got:2
58871 Mismatch Block::0 bo:253 Volume offset:253 Expected:3 Got:2
58872 Mismatch Block::0 bo:203 Volume offset:203 Expected:1 Got:2
58873 Mismatch Block::0 bo:254 Volume offset:254 Expected:3 Got:2
58874 Mismatch Block::0 bo:204 Volume offset:204 Expected:1 Got:2
58875 Mismatch Block::0 bo:255 Volume offset:255 Expected:3 Got:2
58876 Mismatch Block::0 bo:205 Volume offset:205 Expected:1 Got:2
58877 Mismatch Block::0 bo:256 Volume offset:256 Expected:3 Got:2
58878 Mismatch Block::0 bo:206 Volume offset:206 Expected:1 Got:2
58879 Mismatch Block::0 bo:257 Volume offset:257 Expected:3 Got:2
58880 Mismatch Block::0 bo:207 Volume offset:207 Expected:1 Got:2
58881 Mismatch Block::0 bo:258 Volume offset:258 Expected:3 Got:2
58882 Mismatch Block::0 bo:208 Volume offset:208 Expected:1 Got:2
58883 Mismatch Block::0 bo:259 Volume offset:259 Expected:3 Got:2
58884 Mismatch Block::0 bo:209 Volume offset:209 Expected:1 Got:2
58885 Mismatch Block::0 bo:260 Volume offset:260 Expected:3 Got:2
58886 Mismatch Block::0 bo:210 Volume offset:210 Expected:1 Got:2
58887 Mismatch Block::0 bo:261 Volume offset:261 Expected:3 Got:2
58888 Mismatch Block::0 bo:211 Volume offset:211 Expected:1 Got:2
58889 Mismatch Block::0 bo:262 Volume offset:262 Expected:3 Got:2
58890 Mismatch Block::0 bo:212 Volume offset:212 Expected:1 Got:2
58891 Mismatch Block::0 bo:263 Volume offset:263 Expected:3 Got:2
58892 Mismatch Block::0 bo:213 Volume offset:213 Expected:1 Got:2
58893 Mismatch Block::0 bo:264 Volume offset:264 Expected:3 Got:2
58894 Mismatch Block::0 bo:214 Volume offset:214 Expected:1 Got:2
58895 Mismatch Block::0 bo:265 Volume offset:265 Expected:3 Got:2
58896 Mismatch Block::0 bo:215 Volume offset:215 Expected:1 Got:2
58897 Mismatch Block::0 bo:266 Volume offset:266 Expected:3 Got:2
58898 Mismatch Block::0 bo:216 Volume offset:216 Expected:1 Got:2
58899 Mismatch Block::0 bo:267 Volume offset:267 Expected:3 Got:2
58900 Mismatch Block::0 bo:217 Volume offset:217 Expected:1 Got:2
58901 Mismatch Block::0 bo:268 Volume offset:268 Expected:3 Got:2
58902 Mismatch Block::0 bo:218 Volume offset:218 Expected:1 Got:2
58903 Mismatch Block::0 bo:269 Volume offset:269 Expected:3 Got:2
58904 Mismatch Block::0 bo:219 Volume offset:219 Expected:1 Got:2
58905 Mismatch Block::0 bo:270 Volume offset:270 Expected:3 Got:2
58906 Mismatch Block::0 bo:220 Volume offset:220 Expected:1 Got:2
58907 Mismatch Block::0 bo:271 Volume offset:271 Expected:3 Got:2
58908 Mismatch Block::0 bo:221 Volume offset:221 Expected:1 Got:2
58909 Mismatch Block::0 bo:272 Volume offset:272 Expected:3 Got:2
58910 Mismatch Block::0 bo:222 Volume offset:222 Expected:1 Got:2
58911 Mismatch Block::0 bo:273 Volume offset:273 Expected:3 Got:2
58912 Mismatch Block::0 bo:223 Volume offset:223 Expected:1 Got:2
58913 Mismatch Block::0 bo:274 Volume offset:274 Expected:3 Got:2
58914 Mismatch Block::0 bo:224 Volume offset:224 Expected:1 Got:2
58915 Mismatch Block::0 bo:275 Volume offset:275 Expected:3 Got:2
58916 Mismatch Block::0 bo:225 Volume offset:225 Expected:1 Got:2
58917 Mismatch Block::0 bo:276 Volume offset:276 Expected:3 Got:2
58918 Mismatch Block::0 bo:226 Volume offset:226 Expected:1 Got:2
58919 Mismatch Block::0 bo:277 Volume offset:277 Expected:3 Got:2
58920 Mismatch Block::0 bo:227 Volume offset:227 Expected:1 Got:2
58921 Mismatch Block::0 bo:278 Volume offset:278 Expected:3 Got:2
58922 Mismatch Block::0 bo:228 Volume offset:228 Expected:1 Got:2
58923 Mismatch Block::0 bo:279 Volume offset:279 Expected:3 Got:2
58924 Mismatch Block::0 bo:229 Volume offset:229 Expected:1 Got:2
58925 Mismatch Block::0 bo:280 Volume offset:280 Expected:3 Got:2
58926 Mismatch Block::0 bo:230 Volume offset:230 Expected:1 Got:2
58927 Mismatch Block::0 bo:281 Volume offset:281 Expected:3 Got:2
58928 Mismatch Block::0 bo:231 Volume offset:231 Expected:1 Got:2
58929 Mismatch Block::0 bo:282 Volume offset:282 Expected:3 Got:2
58930 Mismatch Block::0 bo:232 Volume offset:232 Expected:1 Got:2
58931 Mismatch Block::0 bo:283 Volume offset:283 Expected:3 Got:2
58932 Mismatch Block::0 bo:233 Volume offset:233 Expected:1 Got:2
58933 Mismatch Block::0 bo:284 Volume offset:284 Expected:3 Got:2
58934 Mismatch Block::0 bo:234 Volume offset:234 Expected:1 Got:2
58935 Mismatch Block::0 bo:285 Volume offset:285 Expected:3 Got:2
58936 Mismatch Block::0 bo:235 Volume offset:235 Expected:1 Got:2
58937 Mismatch Block::0 bo:286 Volume offset:286 Expected:3 Got:2
58938 Mismatch Block::0 bo:236 Volume offset:236 Expected:1 Got:2
58939 Mismatch Block::0 bo:287 Volume offset:287 Expected:3 Got:2
58940 Mismatch Block::0 bo:237 Volume offset:237 Expected:1 Got:2
58941 Mismatch Block::0 bo:288 Volume offset:288 Expected:3 Got:2
58942 Mismatch Block::0 bo:238 Volume offset:238 Expected:1 Got:2
58943 Mismatch Block::0 bo:289 Volume offset:289 Expected:3 Got:2
58944 Mismatch Block::0 bo:239 Volume offset:239 Expected:1 Got:2
58945 Mismatch Block::0 bo:290 Volume offset:290 Expected:3 Got:2
58946 Mismatch Block::0 bo:240 Volume offset:240 Expected:1 Got:2
58947 Mismatch Block::0 bo:291 Volume offset:291 Expected:3 Got:2
58948 Mismatch Block::0 bo:241 Volume offset:241 Expected:1 Got:2
58949 Mismatch Block::0 bo:292 Volume offset:292 Expected:3 Got:2
58950 Mismatch Block::0 bo:242 Volume offset:242 Expected:1 Got:2
58951 Mismatch Block::0 bo:293 Volume offset:293 Expected:3 Got:2
58952 Mismatch Block::0 bo:243 Volume offset:243 Expected:1 Got:2
58953 Mismatch Block::0 bo:294 Volume offset:294 Expected:3 Got:2
58954 Mismatch Block::0 bo:244 Volume offset:244 Expected:1 Got:2
58955 Mismatch Block::0 bo:295 Volume offset:295 Expected:3 Got:2
58956 Mismatch Block::0 bo:245 Volume offset:245 Expected:1 Got:2
58957 Mismatch Block::0 bo:296 Volume offset:296 Expected:3 Got:2
58958 Mismatch Block::0 bo:246 Volume offset:246 Expected:1 Got:2
58959 Mismatch Block::0 bo:297 Volume offset:297 Expected:3 Got:2
58960 Mismatch Block::0 bo:247 Volume offset:247 Expected:1 Got:2
58961 Mismatch Block::0 bo:298 Volume offset:298 Expected:3 Got:2
58962 Mismatch Block::0 bo:248 Volume offset:248 Expected:1 Got:2
58963 Mismatch Block::0 bo:299 Volume offset:299 Expected:3 Got:2
58964 Mismatch Block::0 bo:249 Volume offset:249 Expected:1 Got:2
58965 Mismatch Block::0 bo:300 Volume offset:300 Expected:3 Got:2
58966 Mismatch Block::0 bo:250 Volume offset:250 Expected:1 Got:2
58967 Mismatch Block::0 bo:301 Volume offset:301 Expected:3 Got:2
58968 Mismatch Block::0 bo:251 Volume offset:251 Expected:1 Got:2
58969 Mismatch Block::0 bo:302 Volume offset:302 Expected:3 Got:2
58970 Mismatch Block::0 bo:252 Volume offset:252 Expected:1 Got:2
58971 Mismatch Block::0 bo:303 Volume offset:303 Expected:3 Got:2
58972 Mismatch Block::0 bo:253 Volume offset:253 Expected:1 Got:2
58973 Mismatch Block::0 bo:304 Volume offset:304 Expected:3 Got:2
58974 Mismatch Block::0 bo:254 Volume offset:254 Expected:1 Got:2
58975 Mismatch Block::0 bo:305 Volume offset:305 Expected:3 Got:2
58976 Mismatch Block::0 bo:255 Volume offset:255 Expected:1 Got:2
58977 Mismatch Block::0 bo:306 Volume offset:306 Expected:3 Got:2
58978 Mismatch Block::0 bo:256 Volume offset:256 Expected:1 Got:2
58979 Mismatch Block::0 bo:307 Volume offset:307 Expected:3 Got:2
58980 Mismatch Block::0 bo:257 Volume offset:257 Expected:1 Got:2
58981 Mismatch Block::0 bo:308 Volume offset:308 Expected:3 Got:2
58982 Mismatch Block::0 bo:258 Volume offset:258 Expected:1 Got:2
58983 Mismatch Block::0 bo:309 Volume offset:309 Expected:3 Got:2
58984 Mismatch Block::0 bo:259 Volume offset:259 Expected:1 Got:2
58985 Mismatch Block::0 bo:310 Volume offset:310 Expected:3 Got:2
58986 Mismatch Block::0 bo:260 Volume offset:260 Expected:1 Got:2
58987 Mismatch Block::0 bo:311 Volume offset:311 Expected:3 Got:2
58988 Mismatch Block::0 bo:261 Volume offset:261 Expected:1 Got:2
58989 Mismatch Block::0 bo:312 Volume offset:312 Expected:3 Got:2
58990 Mismatch Block::0 bo:262 Volume offset:262 Expected:1 Got:2
58991 Mismatch Block::0 bo:313 Volume offset:313 Expected:3 Got:2
58992 Mismatch Block::0 bo:263 Volume offset:263 Expected:1 Got:2
58993 Mismatch Block::0 bo:314 Volume offset:314 Expected:3 Got:2
58994 Mismatch Block::0 bo:264 Volume offset:264 Expected:1 Got:2
58995 Mismatch Block::0 bo:315 Volume offset:315 Expected:3 Got:2
58996 Mismatch Block::0 bo:265 Volume offset:265 Expected:1 Got:2
58997 Mismatch Block::0 bo:316 Volume offset:316 Expected:3 Got:2
58998 Mismatch Block::0 bo:266 Volume offset:266 Expected:1 Got:2
58999 Mismatch Block::0 bo:317 Volume offset:317 Expected:3 Got:2
59000 Mismatch Block::0 bo:267 Volume offset:267 Expected:1 Got:2
59001 Mismatch Block::0 bo:318 Volume offset:318 Expected:3 Got:2
59002 Mismatch Block::0 bo:268 Volume offset:268 Expected:1 Got:2
59003 Mismatch Block::0 bo:319 Volume offset:319 Expected:3 Got:2
59004 Mismatch Block::0 bo:269 Volume offset:269 Expected:1 Got:2
59005 Mismatch Block::0 bo:320 Volume offset:320 Expected:3 Got:2
59006 Mismatch Block::0 bo:270 Volume offset:270 Expected:1 Got:2
59007 Mismatch Block::0 bo:321 Volume offset:321 Expected:3 Got:2
59008 Mismatch Block::0 bo:271 Volume offset:271 Expected:1 Got:2
59009 Mismatch Block::0 bo:322 Volume offset:322 Expected:3 Got:2
59010 Mismatch Block::0 bo:272 Volume offset:272 Expected:1 Got:2
59011 Mismatch Block::0 bo:323 Volume offset:323 Expected:3 Got:2
59012 Mismatch Block::0 bo:273 Volume offset:273 Expected:1 Got:2
59013 Mismatch Block::0 bo:324 Volume offset:324 Expected:3 Got:2
59014 Mismatch Block::0 bo:274 Volume offset:274 Expected:1 Got:2
59015 Mismatch Block::0 bo:325 Volume offset:325 Expected:3 Got:2
59016 Mismatch Block::0 bo:275 Volume offset:275 Expected:1 Got:2
59017 Mismatch Block::0 bo:326 Volume offset:326 Expected:3 Got:2
59018 Mismatch Block::0 bo:276 Volume offset:276 Expected:1 Got:2
59019 Mismatch Block::0 bo:327 Volume offset:327 Expected:3 Got:2
59020 Mismatch Block::0 bo:277 Volume offset:277 Expected:1 Got:2
59021 Mismatch Block::0 bo:328 Volume offset:328 Expected:3 Got:2
59022 Mismatch Block::0 bo:278 Volume offset:278 Expected:1 Got:2
59023 Mismatch Block::0 bo:329 Volume offset:329 Expected:3 Got:2
59024 Mismatch Block::0 bo:279 Volume offset:279 Expected:1 Got:2
59025 Mismatch Block::0 bo:330 Volume offset:330 Expected:3 Got:2
59026 Mismatch Block::0 bo:280 Volume offset:280 Expected:1 Got:2
59027 Mismatch Block::0 bo:331 Volume offset:331 Expected:3 Got:2
59028 Mismatch Block::0 bo:281 Volume offset:281 Expected:1 Got:2
59029 Mismatch Block::0 bo:332 Volume offset:332 Expected:3 Got:2
59030 Mismatch Block::0 bo:282 Volume offset:282 Expected:1 Got:2
59031 Mismatch Block::0 bo:333 Volume offset:333 Expected:3 Got:2
59032 Mismatch Block::0 bo:283 Volume offset:283 Expected:1 Got:2
59033 Mismatch Block::0 bo:334 Volume offset:334 Expected:3 Got:2
59034 Mismatch Block::0 bo:284 Volume offset:284 Expected:1 Got:2
59035 Mismatch Block::0 bo:335 Volume offset:335 Expected:3 Got:2
59036 Mismatch Block::0 bo:285 Volume offset:285 Expected:1 Got:2
59037 Mismatch Block::0 bo:336 Volume offset:336 Expected:3 Got:2
59038 Mismatch Block::0 bo:286 Volume offset:286 Expected:1 Got:2
59039 Mismatch Block::0 bo:337 Volume offset:337 Expected:3 Got:2
59040 Mismatch Block::0 bo:287 Volume offset:287 Expected:1 Got:2
59041 Mismatch Block::0 bo:338 Volume offset:338 Expected:3 Got:2
59042 Mismatch Block::0 bo:288 Volume offset:288 Expected:1 Got:2
59043 Mismatch Block::0 bo:339 Volume offset:339 Expected:3 Got:2
59044 Mismatch Block::0 bo:289 Volume offset:289 Expected:1 Got:2
59045 Mismatch Block::0 bo:340 Volume offset:340 Expected:3 Got:2
59046 Mismatch Block::0 bo:290 Volume offset:290 Expected:1 Got:2
59047 Mismatch Block::0 bo:341 Volume offset:341 Expected:3 Got:2
59048 Mismatch Block::0 bo:291 Volume offset:291 Expected:1 Got:2
59049 Mismatch Block::0 bo:342 Volume offset:342 Expected:3 Got:2
59050 Mismatch Block::0 bo:292 Volume offset:292 Expected:1 Got:2
59051 Mismatch Block::0 bo:343 Volume offset:343 Expected:3 Got:2
59052 Mismatch Block::0 bo:293 Volume offset:293 Expected:1 Got:2
59053 Mismatch Block::0 bo:344 Volume offset:344 Expected:3 Got:2
59054 Mismatch Block::0 bo:294 Volume offset:294 Expected:1 Got:2
59055 Mismatch Block::0 bo:345 Volume offset:345 Expected:3 Got:2
59056 Mismatch Block::0 bo:295 Volume offset:295 Expected:1 Got:2
59057 Mismatch Block::0 bo:346 Volume offset:346 Expected:3 Got:2
59058 Mismatch Block::0 bo:296 Volume offset:296 Expected:1 Got:2
59059 Mismatch Block::0 bo:347 Volume offset:347 Expected:3 Got:2
59060 Mismatch Block::0 bo:297 Volume offset:297 Expected:1 Got:2
59061 Mismatch Block::0 bo:348 Volume offset:348 Expected:3 Got:2
59062 Mismatch Block::0 bo:298 Volume offset:298 Expected:1 Got:2
59063 Mismatch Block::0 bo:349 Volume offset:349 Expected:3 Got:2
59064 Mismatch Block::0 bo:299 Volume offset:299 Expected:1 Got:2
59065 Mismatch Block::0 bo:350 Volume offset:350 Expected:3 Got:2
59066 Mismatch Block::0 bo:300 Volume offset:300 Expected:1 Got:2
59067 Mismatch Block::0 bo:351 Volume offset:351 Expected:3 Got:2
59068 Mismatch Block::0 bo:301 Volume offset:301 Expected:1 Got:2
59069 Mismatch Block::0 bo:352 Volume offset:352 Expected:3 Got:2
59070 Mismatch Block::0 bo:302 Volume offset:302 Expected:1 Got:2
59071 Mismatch Block::0 bo:353 Volume offset:353 Expected:3 Got:2
59072 Mismatch Block::0 bo:303 Volume offset:303 Expected:1 Got:2
59073 Mismatch Block::0 bo:354 Volume offset:354 Expected:3 Got:2
59074 Mismatch Block::0 bo:304 Volume offset:304 Expected:1 Got:2
59075 Mismatch Block::0 bo:355 Volume offset:355 Expected:3 Got:2
59076 Mismatch Block::0 bo:305 Volume offset:305 Expected:1 Got:2
59077 Mismatch Block::0 bo:356 Volume offset:356 Expected:3 Got:2
59078 Mismatch Block::0 bo:306 Volume offset:306 Expected:1 Got:2
59079 Mismatch Block::0 bo:357 Volume offset:357 Expected:3 Got:2
59080 Mismatch Block::0 bo:307 Volume offset:307 Expected:1 Got:2
59081 Mismatch Block::0 bo:358 Volume offset:358 Expected:3 Got:2
59082 Mismatch Block::0 bo:308 Volume offset:308 Expected:1 Got:2
59083 Mismatch Block::0 bo:359 Volume offset:359 Expected:3 Got:2
59084 Mismatch Block::0 bo:309 Volume offset:309 Expected:1 Got:2
59085 Mismatch Block::0 bo:360 Volume offset:360 Expected:3 Got:2
59086 Mismatch Block::0 bo:310 Volume offset:310 Expected:1 Got:2
59087 Mismatch Block::0 bo:361 Volume offset:361 Expected:3 Got:2
59088 Mismatch Block::0 bo:311 Volume offset:311 Expected:1 Got:2
59089 Mismatch Block::0 bo:362 Volume offset:362 Expected:3 Got:2
59090 Mismatch Block::0 bo:312 Volume offset:312 Expected:1 Got:2
59091 Mismatch Block::0 bo:363 Volume offset:363 Expected:3 Got:2
59092 Mismatch Block::0 bo:313 Volume offset:313 Expected:1 Got:2
59093 Mismatch Block::0 bo:364 Volume offset:364 Expected:3 Got:2
59094 Mismatch Block::0 bo:314 Volume offset:314 Expected:1 Got:2
59095 Mismatch Block::0 bo:365 Volume offset:365 Expected:3 Got:2
59096 Mismatch Block::0 bo:315 Volume offset:315 Expected:1 Got:2
59097 Mismatch Block::0 bo:366 Volume offset:366 Expected:3 Got:2
59098 Mismatch Block::0 bo:316 Volume offset:316 Expected:1 Got:2
59099 Mismatch Block::0 bo:367 Volume offset:367 Expected:3 Got:2
59100 Mismatch Block::0 bo:317 Volume offset:317 Expected:1 Got:2
59101 Mismatch Block::0 bo:368 Volume offset:368 Expected:3 Got:2
59102 Mismatch Block::0 bo:318 Volume offset:318 Expected:1 Got:2
59103 Mismatch Block::0 bo:369 Volume offset:369 Expected:3 Got:2
59104 Mismatch Block::0 bo:319 Volume offset:319 Expected:1 Got:2
59105 Mismatch Block::0 bo:370 Volume offset:370 Expected:3 Got:2
59106 Mismatch Block::0 bo:320 Volume offset:320 Expected:1 Got:2
59107 Mismatch Block::0 bo:371 Volume offset:371 Expected:3 Got:2
59108 Mismatch Block::0 bo:321 Volume offset:321 Expected:1 Got:2
59109 Mismatch Block::0 bo:372 Volume offset:372 Expected:3 Got:2
59110 Mismatch Block::0 bo:322 Volume offset:322 Expected:1 Got:2
59111 Mismatch Block::0 bo:373 Volume offset:373 Expected:3 Got:2
59112 Mismatch Block::0 bo:323 Volume offset:323 Expected:1 Got:2
59113 Mismatch Block::0 bo:374 Volume offset:374 Expected:3 Got:2
59114 Mismatch Block::0 bo:324 Volume offset:324 Expected:1 Got:2
59115 Mismatch Block::0 bo:375 Volume offset:375 Expected:3 Got:2
59116 Mismatch Block::0 bo:325 Volume offset:325 Expected:1 Got:2
59117 Mismatch Block::0 bo:376 Volume offset:376 Expected:3 Got:2
59118 Mismatch Block::0 bo:326 Volume offset:326 Expected:1 Got:2
59119 Mismatch Block::0 bo:377 Volume offset:377 Expected:3 Got:2
59120 Mismatch Block::0 bo:327 Volume offset:327 Expected:1 Got:2
59121 Mismatch Block::0 bo:378 Volume offset:378 Expected:3 Got:2
59122 Mismatch Block::0 bo:328 Volume offset:328 Expected:1 Got:2
59123 Mismatch Block::0 bo:379 Volume offset:379 Expected:3 Got:2
59124 Mismatch Block::0 bo:329 Volume offset:329 Expected:1 Got:2
59125 Mismatch Block::0 bo:380 Volume offset:380 Expected:3 Got:2
59126 Mismatch Block::0 bo:330 Volume offset:330 Expected:1 Got:2
59127 Mismatch Block::0 bo:381 Volume offset:381 Expected:3 Got:2
59128 Mismatch Block::0 bo:331 Volume offset:331 Expected:1 Got:2
59129 Mismatch Block::0 bo:382 Volume offset:382 Expected:3 Got:2
59130 Mismatch Block::0 bo:332 Volume offset:332 Expected:1 Got:2
59131 Mismatch Block::0 bo:383 Volume offset:383 Expected:3 Got:2
59132 Mismatch Block::0 bo:333 Volume offset:333 Expected:1 Got:2
59133 Mismatch Block::0 bo:384 Volume offset:384 Expected:3 Got:2
59134 Mismatch Block::0 bo:334 Volume offset:334 Expected:1 Got:2
59135 Mismatch Block::0 bo:385 Volume offset:385 Expected:3 Got:2
59136 Mismatch Block::0 bo:335 Volume offset:335 Expected:1 Got:2
59137 Mismatch Block::0 bo:386 Volume offset:386 Expected:3 Got:2
59138 Mismatch Block::0 bo:336 Volume offset:336 Expected:1 Got:2
59139 Mismatch Block::0 bo:387 Volume offset:387 Expected:3 Got:2
59140 Mismatch Block::0 bo:337 Volume offset:337 Expected:1 Got:2
59141 Mismatch Block::0 bo:388 Volume offset:388 Expected:3 Got:2
59142 Mismatch Block::0 bo:338 Volume offset:338 Expected:1 Got:2
59143 Mismatch Block::0 bo:389 Volume offset:389 Expected:3 Got:2
59144 Mismatch Block::0 bo:339 Volume offset:339 Expected:1 Got:2
59145 Mismatch Block::0 bo:390 Volume offset:390 Expected:3 Got:2
59146 Mismatch Block::0 bo:340 Volume offset:340 Expected:1 Got:2
59147 Mismatch Block::0 bo:391 Volume offset:391 Expected:3 Got:2
59148 Mismatch Block::0 bo:341 Volume offset:341 Expected:1 Got:2
59149 Mismatch Block::0 bo:392 Volume offset:392 Expected:3 Got:2
59150 Mismatch Block::0 bo:342 Volume offset:342 Expected:1 Got:2
59151 Mismatch Block::0 bo:393 Volume offset:393 Expected:3 Got:2
59152 Mismatch Block::0 bo:343 Volume offset:343 Expected:1 Got:2
59153 Mismatch Block::0 bo:394 Volume offset:394 Expected:3 Got:2
59154 Mismatch Block::0 bo:344 Volume offset:344 Expected:1 Got:2
59155 Mismatch Block::0 bo:395 Volume offset:395 Expected:3 Got:2
59156 Mismatch Block::0 bo:345 Volume offset:345 Expected:1 Got:2
59157 Mismatch Block::0 bo:396 Volume offset:396 Expected:3 Got:2
59158 Mismatch Block::0 bo:346 Volume offset:346 Expected:1 Got:2
59159 Mismatch Block::0 bo:397 Volume offset:397 Expected:3 Got:2
59160 Mismatch Block::0 bo:347 Volume offset:347 Expected:1 Got:2
59161 Mismatch Block::0 bo:398 Volume offset:398 Expected:3 Got:2
59162 Mismatch Block::0 bo:348 Volume offset:348 Expected:1 Got:2
59163 Mismatch Block::0 bo:399 Volume offset:399 Expected:3 Got:2
59164 Mismatch Block::0 bo:349 Volume offset:349 Expected:1 Got:2
59165 Mismatch Block::0 bo:400 Volume offset:400 Expected:3 Got:2
59166 Mismatch Block::0 bo:350 Volume offset:350 Expected:1 Got:2
59167 Mismatch Block::0 bo:401 Volume offset:401 Expected:3 Got:2
59168 Mismatch Block::0 bo:351 Volume offset:351 Expected:1 Got:2
59169 Mismatch Block::0 bo:402 Volume offset:402 Expected:3 Got:2
59170 Mismatch Block::0 bo:352 Volume offset:352 Expected:1 Got:2
59171 Mismatch Block::0 bo:403 Volume offset:403 Expected:3 Got:2
59172 Mismatch Block::0 bo:353 Volume offset:353 Expected:1 Got:2
59173 Mismatch Block::0 bo:404 Volume offset:404 Expected:3 Got:2
59174 Mismatch Block::0 bo:354 Volume offset:354 Expected:1 Got:2
59175 Mismatch Block::0 bo:405 Volume offset:405 Expected:3 Got:2
59176 Mismatch Block::0 bo:355 Volume offset:355 Expected:1 Got:2
59177 Mismatch Block::0 bo:406 Volume offset:406 Expected:3 Got:2
59178 Mismatch Block::0 bo:356 Volume offset:356 Expected:1 Got:2
59179 Mismatch Block::0 bo:407 Volume offset:407 Expected:3 Got:2
59180 Mismatch Block::0 bo:357 Volume offset:357 Expected:1 Got:2
59181 Mismatch Block::0 bo:408 Volume offset:408 Expected:3 Got:2
59182 Mismatch Block::0 bo:358 Volume offset:358 Expected:1 Got:2
59183 Mismatch Block::0 bo:409 Volume offset:409 Expected:3 Got:2
59184 Mismatch Block::0 bo:359 Volume offset:359 Expected:1 Got:2
59185 Mismatch Block::0 bo:410 Volume offset:410 Expected:3 Got:2
59186 Mismatch Block::0 bo:360 Volume offset:360 Expected:1 Got:2
59187 Mismatch Block::0 bo:411 Volume offset:411 Expected:3 Got:2
59188 Mismatch Block::0 bo:361 Volume offset:361 Expected:1 Got:2
59189 Mismatch Block::0 bo:412 Volume offset:412 Expected:3 Got:2
59190 Mismatch Block::0 bo:362 Volume offset:362 Expected:1 Got:2
59191 Mismatch Block::0 bo:413 Volume offset:413 Expected:3 Got:2
59192 Mismatch Block::0 bo:363 Volume offset:363 Expected:1 Got:2
59193 Mismatch Block::0 bo:414 Volume offset:414 Expected:3 Got:2
59194 Mismatch Block::0 bo:364 Volume offset:364 Expected:1 Got:2
59195 Mismatch Block::0 bo:415 Volume offset:415 Expected:3 Got:2
59196 Mismatch Block::0 bo:365 Volume offset:365 Expected:1 Got:2
59197 Mismatch Block::0 bo:416 Volume offset:416 Expected:3 Got:2
59198 Mismatch Block::0 bo:366 Volume offset:366 Expected:1 Got:2
59199 Mismatch Block::0 bo:417 Volume offset:417 Expected:3 Got:2
59200 Mismatch Block::0 bo:367 Volume offset:367 Expected:1 Got:2
59201 Mismatch Block::0 bo:418 Volume offset:418 Expected:3 Got:2
59202 Mismatch Block::0 bo:368 Volume offset:368 Expected:1 Got:2
59203 Mismatch Block::0 bo:419 Volume offset:419 Expected:3 Got:2
59204 Mismatch Block::0 bo:369 Volume offset:369 Expected:1 Got:2
59205 Mismatch Block::0 bo:420 Volume offset:420 Expected:3 Got:2
59206 Mismatch Block::0 bo:370 Volume offset:370 Expected:1 Got:2
59207 Mismatch Block::0 bo:421 Volume offset:421 Expected:3 Got:2
59208 Mismatch Block::0 bo:371 Volume offset:371 Expected:1 Got:2
59209 Mismatch Block::0 bo:422 Volume offset:422 Expected:3 Got:2
59210 Mismatch Block::0 bo:372 Volume offset:372 Expected:1 Got:2
59211 Mismatch Block::0 bo:423 Volume offset:423 Expected:3 Got:2
59212 Mismatch Block::0 bo:373 Volume offset:373 Expected:1 Got:2
59213 Mismatch Block::0 bo:424 Volume offset:424 Expected:3 Got:2
59214 Mismatch Block::0 bo:374 Volume offset:374 Expected:1 Got:2
59215 Mismatch Block::0 bo:425 Volume offset:425 Expected:3 Got:2
59216 Mismatch Block::0 bo:375 Volume offset:375 Expected:1 Got:2
59217 Mismatch Block::0 bo:426 Volume offset:426 Expected:3 Got:2
59218 Mismatch Block::0 bo:376 Volume offset:376 Expected:1 Got:2
59219 Mismatch Block::0 bo:427 Volume offset:427 Expected:3 Got:2
59220 Mismatch Block::0 bo:377 Volume offset:377 Expected:1 Got:2
59221 Mismatch Block::0 bo:428 Volume offset:428 Expected:3 Got:2
59222 Mismatch Block::0 bo:378 Volume offset:378 Expected:1 Got:2
59223 Mismatch Block::0 bo:429 Volume offset:429 Expected:3 Got:2
59224 Mismatch Block::0 bo:379 Volume offset:379 Expected:1 Got:2
59225 Mismatch Block::0 bo:430 Volume offset:430 Expected:3 Got:2
59226 Mismatch Block::0 bo:380 Volume offset:380 Expected:1 Got:2
59227 Mismatch Block::0 bo:431 Volume offset:431 Expected:3 Got:2
59228 Mismatch Block::0 bo:381 Volume offset:381 Expected:1 Got:2
59229 Mismatch Block::0 bo:432 Volume offset:432 Expected:3 Got:2
59230 Mismatch Block::0 bo:382 Volume offset:382 Expected:1 Got:2
59231 Mismatch Block::0 bo:433 Volume offset:433 Expected:3 Got:2
59232 Mismatch Block::0 bo:383 Volume offset:383 Expected:1 Got:2
59233 Mismatch Block::0 bo:434 Volume offset:434 Expected:3 Got:2
59234 Mismatch Block::0 bo:384 Volume offset:384 Expected:1 Got:2
59235 Mismatch Block::0 bo:435 Volume offset:435 Expected:3 Got:2
59236 Mismatch Block::0 bo:385 Volume offset:385 Expected:1 Got:2
59237 Mismatch Block::0 bo:436 Volume offset:436 Expected:3 Got:2
59238 Mismatch Block::0 bo:386 Volume offset:386 Expected:1 Got:2
59239 Mismatch Block::0 bo:437 Volume offset:437 Expected:3 Got:2
59240 Mismatch Block::0 bo:387 Volume offset:387 Expected:1 Got:2
59241 Mismatch Block::0 bo:438 Volume offset:438 Expected:3 Got:2
59242 Mismatch Block::0 bo:388 Volume offset:388 Expected:1 Got:2
59243 Mismatch Block::0 bo:439 Volume offset:439 Expected:3 Got:2
59244 Mismatch Block::0 bo:389 Volume offset:389 Expected:1 Got:2
59245 Mismatch Block::0 bo:440 Volume offset:440 Expected:3 Got:2
59246 Mismatch Block::0 bo:390 Volume offset:390 Expected:1 Got:2
59247 Mismatch Block::0 bo:441 Volume offset:441 Expected:3 Got:2
59248 Mismatch Block::0 bo:391 Volume offset:391 Expected:1 Got:2
59249 Mismatch Block::0 bo:442 Volume offset:442 Expected:3 Got:2
59250 Mismatch Block::0 bo:392 Volume offset:392 Expected:1 Got:2
59251 Mismatch Block::0 bo:443 Volume offset:443 Expected:3 Got:2
59252 Mismatch Block::0 bo:393 Volume offset:393 Expected:1 Got:2
59253 Mismatch Block::0 bo:444 Volume offset:444 Expected:3 Got:2
59254 Mismatch Block::0 bo:394 Volume offset:394 Expected:1 Got:2
59255 Mismatch Block::0 bo:445 Volume offset:445 Expected:3 Got:2
59256 Mismatch Block::0 bo:395 Volume offset:395 Expected:1 Got:2
59257 Mismatch Block::0 bo:446 Volume offset:446 Expected:3 Got:2
59258 Mismatch Block::0 bo:396 Volume offset:396 Expected:1 Got:2
59259 Mismatch Block::0 bo:447 Volume offset:447 Expected:3 Got:2
59260 Mismatch Block::0 bo:397 Volume offset:397 Expected:1 Got:2
59261 Mismatch Block::0 bo:448 Volume offset:448 Expected:3 Got:2
59262 Mismatch Block::0 bo:398 Volume offset:398 Expected:1 Got:2
59263 Mismatch Block::0 bo:449 Volume offset:449 Expected:3 Got:2
59264 Mismatch Block::0 bo:399 Volume offset:399 Expected:1 Got:2
59265 Mismatch Block::0 bo:450 Volume offset:450 Expected:3 Got:2
59266 Mismatch Block::0 bo:400 Volume offset:400 Expected:1 Got:2
59267 Mismatch Block::0 bo:451 Volume offset:451 Expected:3 Got:2
59268 Mismatch Block::0 bo:401 Volume offset:401 Expected:1 Got:2
59269 Mismatch Block::0 bo:452 Volume offset:452 Expected:3 Got:2
59270 Mismatch Block::0 bo:402 Volume offset:402 Expected:1 Got:2
59271 Mismatch Block::0 bo:453 Volume offset:453 Expected:3 Got:2
59272 Mismatch Block::0 bo:403 Volume offset:403 Expected:1 Got:2
59273 Mismatch Block::0 bo:454 Volume offset:454 Expected:3 Got:2
59274 Mismatch Block::0 bo:404 Volume offset:404 Expected:1 Got:2
59275 Mismatch Block::0 bo:455 Volume offset:455 Expected:3 Got:2
59276 Mismatch Block::0 bo:405 Volume offset:405 Expected:1 Got:2
59277 Mismatch Block::0 bo:456 Volume offset:456 Expected:3 Got:2
59278 Mismatch Block::0 bo:406 Volume offset:406 Expected:1 Got:2
59279 Mismatch Block::0 bo:457 Volume offset:457 Expected:3 Got:2
59280 Mismatch Block::0 bo:407 Volume offset:407 Expected:1 Got:2
59281 Mismatch Block::0 bo:458 Volume offset:458 Expected:3 Got:2
59282 Mismatch Block::0 bo:408 Volume offset:408 Expected:1 Got:2
59283 Mismatch Block::0 bo:459 Volume offset:459 Expected:3 Got:2
59284 Mismatch Block::0 bo:409 Volume offset:409 Expected:1 Got:2
59285 Mismatch Block::0 bo:460 Volume offset:460 Expected:3 Got:2
59286 Mismatch Block::0 bo:410 Volume offset:410 Expected:1 Got:2
59287 Mismatch Block::0 bo:461 Volume offset:461 Expected:3 Got:2
59288 Mismatch Block::0 bo:411 Volume offset:411 Expected:1 Got:2
59289 Mismatch Block::0 bo:462 Volume offset:462 Expected:3 Got:2
59290 Mismatch Block::0 bo:412 Volume offset:412 Expected:1 Got:2
59291 Mismatch Block::0 bo:463 Volume offset:463 Expected:3 Got:2
59292 Mismatch Block::0 bo:413 Volume offset:413 Expected:1 Got:2
59293 Mismatch Block::0 bo:464 Volume offset:464 Expected:3 Got:2
59294 Mismatch Block::0 bo:414 Volume offset:414 Expected:1 Got:2
59295 Mismatch Block::0 bo:465 Volume offset:465 Expected:3 Got:2
59296 Mismatch Block::0 bo:415 Volume offset:415 Expected:1 Got:2
59297 Mismatch Block::0 bo:466 Volume offset:466 Expected:3 Got:2
59298 Mismatch Block::0 bo:416 Volume offset:416 Expected:1 Got:2
59299 Mismatch Block::0 bo:467 Volume offset:467 Expected:3 Got:2
59300 Mismatch Block::0 bo:417 Volume offset:417 Expected:1 Got:2
59301 Mismatch Block::0 bo:468 Volume offset:468 Expected:3 Got:2
59302 Mismatch Block::0 bo:418 Volume offset:418 Expected:1 Got:2
59303 Mismatch Block::0 bo:469 Volume offset:469 Expected:3 Got:2
59304 Mismatch Block::0 bo:419 Volume offset:419 Expected:1 Got:2
59305 Mismatch Block::0 bo:470 Volume offset:470 Expected:3 Got:2
59306 Mismatch Block::0 bo:420 Volume offset:420 Expected:1 Got:2
59307 Mismatch Block::0 bo:471 Volume offset:471 Expected:3 Got:2
59308 Mismatch Block::0 bo:421 Volume offset:421 Expected:1 Got:2
59309 Mismatch Block::0 bo:472 Volume offset:472 Expected:3 Got:2
59310 Mismatch Block::0 bo:422 Volume offset:422 Expected:1 Got:2
59311 Mismatch Block::0 bo:473 Volume offset:473 Expected:3 Got:2
59312 Mismatch Block::0 bo:423 Volume offset:423 Expected:1 Got:2
59313 Mismatch Block::0 bo:474 Volume offset:474 Expected:3 Got:2
59314 Mismatch Block::0 bo:424 Volume offset:424 Expected:1 Got:2
59315 Mismatch Block::0 bo:475 Volume offset:475 Expected:3 Got:2
59316 Mismatch Block::0 bo:425 Volume offset:425 Expected:1 Got:2
59317 Mismatch Block::0 bo:476 Volume offset:476 Expected:3 Got:2
59318 Mismatch Block::0 bo:426 Volume offset:426 Expected:1 Got:2
59319 Mismatch Block::0 bo:477 Volume offset:477 Expected:3 Got:2
59320 Mismatch Block::0 bo:427 Volume offset:427 Expected:1 Got:2
59321 Mismatch Block::0 bo:478 Volume offset:478 Expected:3 Got:2
59322 Mismatch Block::0 bo:428 Volume offset:428 Expected:1 Got:2
59323 Mismatch Block::0 bo:479 Volume offset:479 Expected:3 Got:2
59324 Mismatch Block::0 bo:429 Volume offset:429 Expected:1 Got:2
59325 Mismatch Block::0 bo:480 Volume offset:480 Expected:3 Got:2
59326 Mismatch Block::0 bo:430 Volume offset:430 Expected:1 Got:2
59327 Mismatch Block::0 bo:481 Volume offset:481 Expected:3 Got:2
59328 Mismatch Block::0 bo:431 Volume offset:431 Expected:1 Got:2
59329 Mismatch Block::0 bo:482 Volume offset:482 Expected:3 Got:2
59330 Mismatch Block::0 bo:432 Volume offset:432 Expected:1 Got:2
59331 Mismatch Block::0 bo:483 Volume offset:483 Expected:3 Got:2
59332 Mismatch Block::0 bo:433 Volume offset:433 Expected:1 Got:2
59333 Mismatch Block::0 bo:484 Volume offset:484 Expected:3 Got:2
59334 Mismatch Block::0 bo:434 Volume offset:434 Expected:1 Got:2
59335 Mismatch Block::0 bo:485 Volume offset:485 Expected:3 Got:2
59336 Mismatch Block::0 bo:435 Volume offset:435 Expected:1 Got:2
59337 Mismatch Block::0 bo:486 Volume offset:486 Expected:3 Got:2
59338 Mismatch Block::0 bo:436 Volume offset:436 Expected:1 Got:2
59339 Mismatch Block::0 bo:487 Volume offset:487 Expected:3 Got:2
59340 Mismatch Block::0 bo:437 Volume offset:437 Expected:1 Got:2
59341 Mismatch Block::0 bo:488 Volume offset:488 Expected:3 Got:2
59342 Mismatch Block::0 bo:438 Volume offset:438 Expected:1 Got:2
59343 Mismatch Block::0 bo:489 Volume offset:489 Expected:3 Got:2
59344 Mismatch Block::0 bo:439 Volume offset:439 Expected:1 Got:2
59345 Mismatch Block::0 bo:490 Volume offset:490 Expected:3 Got:2
59346 Mismatch Block::0 bo:440 Volume offset:440 Expected:1 Got:2
59347 Mismatch Block::0 bo:491 Volume offset:491 Expected:3 Got:2
59348 Mismatch Block::0 bo:441 Volume offset:441 Expected:1 Got:2
59349 Mismatch Block::0 bo:492 Volume offset:492 Expected:3 Got:2
59350 Mismatch Block::0 bo:442 Volume offset:442 Expected:1 Got:2
59351 Mismatch Block::0 bo:493 Volume offset:493 Expected:3 Got:2
59352 Mismatch Block::0 bo:443 Volume offset:443 Expected:1 Got:2
59353 Mismatch Block::0 bo:494 Volume offset:494 Expected:3 Got:2
59354 Mismatch Block::0 bo:444 Volume offset:444 Expected:1 Got:2
59355 Mismatch Block::0 bo:495 Volume offset:495 Expected:3 Got:2
59356 Mismatch Block::0 bo:445 Volume offset:445 Expected:1 Got:2
59357 Mismatch Block::0 bo:496 Volume offset:496 Expected:3 Got:2
59358 Mismatch Block::0 bo:446 Volume offset:446 Expected:1 Got:2
59359 Mismatch Block::0 bo:497 Volume offset:497 Expected:3 Got:2
59360 Mismatch Block::0 bo:447 Volume offset:447 Expected:1 Got:2
59361 Mismatch Block::0 bo:498 Volume offset:498 Expected:3 Got:2
59362 Mismatch Block::0 bo:448 Volume offset:448 Expected:1 Got:2
59363 Mismatch Block::0 bo:499 Volume offset:499 Expected:3 Got:2
59364 Mismatch Block::0 bo:449 Volume offset:449 Expected:1 Got:2
59365 Mismatch Block::0 bo:500 Volume offset:500 Expected:3 Got:2
59366 Mismatch Block::0 bo:450 Volume offset:450 Expected:1 Got:2
59367 Mismatch Block::0 bo:501 Volume offset:501 Expected:3 Got:2
59368 Mismatch Block::0 bo:451 Volume offset:451 Expected:1 Got:2
59369 Mismatch Block::0 bo:502 Volume offset:502 Expected:3 Got:2
59370 Mismatch Block::0 bo:452 Volume offset:452 Expected:1 Got:2
59371 Mismatch Block::0 bo:503 Volume offset:503 Expected:3 Got:2
59372 Mismatch Block::0 bo:453 Volume offset:453 Expected:1 Got:2
59373 Mismatch Block::0 bo:504 Volume offset:504 Expected:3 Got:2
59374 Mismatch Block::0 bo:454 Volume offset:454 Expected:1 Got:2
59375 Mismatch Block::0 bo:505 Volume offset:505 Expected:3 Got:2
59376 Mismatch Block::0 bo:455 Volume offset:455 Expected:1 Got:2
59377 Mismatch Block::0 bo:506 Volume offset:506 Expected:3 Got:2
59378 Mismatch Block::0 bo:456 Volume offset:456 Expected:1 Got:2
59379 Mismatch Block::0 bo:507 Volume offset:507 Expected:3 Got:2
59380 Mismatch Block::0 bo:457 Volume offset:457 Expected:1 Got:2
59381 Mismatch Block::0 bo:508 Volume offset:508 Expected:3 Got:2
59382 Mismatch Block::0 bo:458 Volume offset:458 Expected:1 Got:2
59383 Mismatch Block::0 bo:509 Volume offset:509 Expected:3 Got:2
59384 Mismatch Block::0 bo:459 Volume offset:459 Expected:1 Got:2
59385 Mismatch Block::0 bo:510 Volume offset:510 Expected:3 Got:2
59386 Mismatch Block::0 bo:460 Volume offset:460 Expected:1 Got:2
59387 Mismatch Block::0 bo:511 Volume offset:511 Expected:3 Got:2
59388 Mismatch Block::0 bo:461 Volume offset:461 Expected:1 Got:2
59389 Mismatch Block::0 bo:462 Volume offset:462 Expected:1 Got:2
59390 
59391 Mismatch Block::0 bo:463 Volume offset:463 Expected:1 Got:2
59392 Mismatch Block::0 bo:464 Volume offset:464 Expected:1 Got:2
59393 Mismatch Block::0 bo:465 Volume offset:465 Expected:1 Got:2
59394 Mismatch Block::0 bo:466 Volume offset:466 Expected:1 Got:2
59395 Mismatch Block::0 bo:467 Volume offset:467 Expected:1 Got:2
59396 Mismatch Block::0 bo:468 Volume offset:468 Expected:1 Got:2
59397 Mismatch Block::0 bo:469 Volume offset:469 Expected:1 Got:2
59398 test test::test_wl_commit_1024_range_rollover_range ... ok
59399 Mismatch Block::0 bo:470 Volume offset:470 Expected:1 Got:2
59400 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
59401 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
59402 new cur is 1022 from min
59403 Adjusting new cur to 1022
59404 new cur is 1024 from cur
59405 Shift 3, v:255 sv:1023 min:1022 cur:1022
59406 Mismatch Block::99 bo:511 Volume offset:51199 Expected:1 Got:9
59407 Shift 3, v:0 sv:768 min:1022 cur:1022
59408 Shift 3, v:1 sv:769 min:1022 cur:1022
59409 test test::test_wl_commit_1024_range_no_update_below_rollover ... Shift 0, v:1 sv:1 min:2 cur:4
59410 Shift 0, v:2 sv:2 min:2 cur:4
59411 Shift 0, v:3 sv:3 min:2 cur:4
59412 Shift 0, v:4 sv:4 min:2 cur:4
59413 Shift 0, v:5 sv:5 min:2 cur:4
59414 Shift 3, v:2 sv:770 min:1022 cur:1022
59415 ok
59416 Adjusting new cur to 1024
59417 Mismatch Block::0 bo:471 Volume offset:471 Expected:1 Got:2
59418 SPEC v:254 min_av:254 cur_av:0 cm:1022 cc:1024
59419 Mismatch Block::0 bo:472 Volume offset:472 Expected:1 Got:2
59420 new cur is 1022 from min
59421 Mismatch Block::0 bo:473 Volume offset:473 Expected:1 Got:2
59422 SPEC v:255 min_av:254 cur_av:0 cm:1022 cc:1024
59423 new cur is 1023 from min
59424 Mismatch Block::0 bo:474 Volume offset:474 Expected:1 Got:2
59425 SPEC v:1 min_av:254 cur_av:0 cm:1022 cc:1024
59426 Mismatch Block::0 bo:475 Volume offset:475 Expected:1 Got:2
59427 SPEC v:2 min_av:254 cur_av:0 cm:1022 cc:1024
59428 test test::test_read_compare_fail ... Mismatch Block::0 bo:476 Volume offset:476 Expected:1 Got:2
59429 ok
59430 Mismatch Block::0 bo:477 Volume offset:477 Expected:1 Got:2
59431 Mismatch Block::0 bo:478 Volume offset:478 Expected:1 Got:2
59432 Mismatch Block::0 bo:479 Volume offset:479 Expected:1 Got:2
59433 SPEC v:2 min_av:254 cur_av:1 cm:254 cc:257
59434 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
59435 new cur is 254 from min
59436 Mismatch Block::0 bo:480 Volume offset:480 Expected:1 Got:2
59437 Mismatch Block::0 bo:481 Volume offset:481 Expected:1 Got:2
59438 Mismatch Block::0 bo:482 Volume offset:482 Expected:1 Got:2
59439 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
59440 SPEC v:255 min_av:254 cur_av:1 cm:254 cc:257
59441 new cur is 255 from min
59442 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
59443 new cur is 256 from cur
59444 SPEC v:1 min_av:254 cur_av:1 cm:254 cc:257
59445 test test::test_read_compare_large_fail ... new cur is 257 from cur
59446 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
59447 new cur is 254 from min
59448 SPEC v:255 min_av:254 cur_av:1 cm:254 cc:257
59449 new cur is 255 from min
59450 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
59451 ok
59452 new cur is 256 from cur
59453 SPEC v:1 min_av:254 cur_av:1 cm:254 cc:257
59454 new cur is 257 from cur
59455 SPEC v:2 min_av:254 cur_av:1 cm:254 cc:257
59456 Mismatch Block::0 bo:483 Volume offset:483 Expected:1 Got:2
59457 Mismatch Block::0 bo:484 Volume offset:484 Expected:1 Got:2
59458 Mismatch Block::0 bo:485 Volume offset:485 Expected:1 Got:2
59459 Mismatch Block::0 bo:486 Volume offset:486 Expected:1 Got:2
59460 Mismatch Block::0 bo:487 Volume offset:487 Expected:1 Got:2
59461 Mismatch Block::0 bo:488 Volume offset:488 Expected:1 Got:2
59462 Mismatch Block::0 bo:489 Volume offset:489 Expected:1 Got:2
59463 Mismatch Block::0 bo:490 Volume offset:490 Expected:1 Got:2
59464 Mismatch Block::0 bo:491 Volume offset:491 Expected:1 Got:2
59465 Mismatch Block::0 bo:492 Volume offset:492 Expected:1 Got:2
59466 Mismatch Block::0 bo:493 Volume offset:493 Expected:1 Got:2
59467 Mismatch Block::0 bo:494 Volume offset:494 Expected:1 Got:2
59468 Mismatch Block::0 bo:495 Volume offset:495 Expected:1 Got:2
59469 Mismatch Block::0 bo:496 Volume offset:496 Expected:1 Got:2
59470 Mismatch Block::0 bo:497 Volume offset:497 Expected:1 Got:2
59471 Mismatch Block::0 bo:498 Volume offset:498 Expected:1 Got:2
59472 Mismatch Block::0 bo:499 Volume offset:499 Expected:1 Got:2
59473 Mismatch Block::0 bo:500 Volume offset:500 Expected:1 Got:2
59474 Mismatch Block::0 bo:501 Volume offset:501 Expected:1 Got:2
59475 Mismatch Block::0 bo:502 Volume offset:502 Expected:1 Got:2
59476 Mismatch Block::0 bo:503 Volume offset:503 Expected:1 Got:2
59477 test test::test_wl_commit_range ... Mismatch Block::0 bo:504 Volume offset:504 Expected:1 Got:2
59478 ok
59479 Mismatch Block::0 bo:505 Volume offset:505 Expected:1 Got:2
59480 Mismatch Block::0 bo:506 Volume offset:506 Expected:1 Got:2
59481 SPEC v:252 min_av:253 cur_av:0 cm:253 cc:256
59482 Mismatch Block::0 bo:507 Volume offset:507 Expected:1 Got:2
59483 SPEC v:253 min_av:253 cur_av:0 cm:253 cc:256
59484 Mismatch Block::0 bo:508 Volume offset:508 Expected:1 Got:2
59485 new cur is 253 from min
59486 Mismatch Block::0 bo:509 Volume offset:509 Expected:1 Got:2
59487 SPEC v:254 min_av:253 cur_av:0 cm:253 cc:256
59488 new cur is 254 from min
59489 Mismatch Block::0 bo:510 Volume offset:510 Expected:1 Got:2
59490 SPEC v:255 min_av:253 cur_av:0 cm:253 cc:256
59491 test test::test_wl_commit_1024_range_update_rollover_below ... ok
59492 new cur is 255 from min
59493 Mismatch Block::0 bo:511 Volume offset:511 Expected:1 Got:2
59494 SPEC v:0 min_av:253 cur_av:0 cm:253 cc:256
59495 new cur is 256 from cur
59496 SPEC v:254 min_av:255 cur_av:1 cm:255 cc:257
59497 SPEC v:255 min_av:255 cur_av:1 cm:255 cc:257
59498 new cur is 255 from min
59499 SPEC v:0 min_av:255 cur_av:1 cm:255 cc:257
59500 new cur is 256 from cur
59501 SPEC v:1 min_av:253 cur_av:0 cm:253 cc:256
59502 SPEC v:1 min_av:255 cur_av:1 cm:255 cc:257
59503 new cur is 257 from cur
59504 SPEC v:2 min_av:255 cur_av:1 cm:255 cc:257
59505 test test::test_wl_commit_1024_range_update_rollover_above ... ok
59506 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
59507 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
59508 new cur is 254 from min
59509 SPEC v:255 min_av:254 cur_av:1 cm:254 cc:257
59510 new cur is 255 from min
59511 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
59512 new cur is 256 from cur
59513 SPEC v:1 min_av:254 cur_av:1 cm:254 cc:257
59514 new cur is 257 from cur
59515 SPEC v:2 min_av:254 cur_av:1 cm:254 cc:257
59516 test test::test_wl_commit_range_no_update_above_rollover ... ok
59517 Shift 0, v:3 sv:3 min:2 cur:4
59518 Update block 1 to 3 (min:2 max:4 res:true)
59519 test test::test_wl_commit_range_no_update_below_rollover ... ok
59520 Shift 0, v:4 sv:4 min:2 cur:4
59521 test test::test_read_compare_fail_under ... ok
59522 Shift 0, v:2 sv:2 min:2 cur:4
59523 Update block 1 to 2 (min:2 max:4 res:true)
59524 test test::test_wl_commit_range_rollover_max_at ... ok
59525 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
59526 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
59527 new cur is 256 from cur
59528 Adjusting new cur to 256
59529 SPEC v:254 min_av:254 cur_av:0 cm:254 cc:256
59530 new cur is 254 from min
59531 SPEC v:255 min_av:254 cur_av:0 cm:254 cc:256
59532 new cur is 255 from min
59533 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
59534 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
59535 new cur is 254 from min
59536 Adjusting new cur to 254
59537 Shift 0, v:255 sv:255 min:254 cur:254
59538 Shift 0, v:0 sv:0 min:254 cur:254
59539 Shift 0, v:1 sv:1 min:254 cur:254
59540 Shift 0, v:2 sv:2 min:254 cur:254
59541 test test::test_wl_commit_range_rollover_min_at ... ok
59542 SPEC v:1 min_av:254 cur_av:0 cm:254 cc:256
59543 SPEC v:2 min_av:254 cur_av:0 cm:254 cc:256
59544 test test::test_wl_commit_range_rollover_range ... ok
59545 test test::test_wl_commit_range_update ... ok
59546 test test::test_wl_commit_range_update_max ... ok
59547 Shift 0, v:1 sv:1 min:2 cur:4
59548 Out of Range Block::1 bo:1 Volume offset:513 Expected:4 Got:1
59549 Shift 0, v:1 sv:1 min:2 cur:4
59550 Out of Range Block::1 bo:2 Volume offset:514 Expected:4 Got:1
59551 Shift 0, v:1 sv:1 min:2 cur:4
59552 Out of Range Block::1 bo:3 Volume offset:515 Expected:4 Got:1
59553 Shift 0, v:1 sv:1 min:2 cur:4
59554 Out of Range Block::1 bo:4 Volume offset:516 Expected:4 Got:1
59555 Shift 0, v:1 sv:1 min:2 cur:4
59556 Out of Range Block::1 bo:5 Volume offset:517 Expected:4 Got:1
59557 Shift 0, v:1 sv:1 min:2 cur:4
59558 Out of Range Block::1 bo:6 Volume offset:518 Expected:4 Got:1
59559 Shift 0, v:1 sv:1 min:2 cur:4
59560 Out of Range Block::1 bo:7 Volume offset:519 Expected:4 Got:1
59561 Shift 0, v:1 sv:1 min:2 cur:4
59562 Out of Range Block::1 bo:8 Volume offset:520 Expected:4 Got:1
59563 test test::test_wl_commit_range_update_min ... Shift 0, v:1 sv:1 min:2 cur:4
59564 Out of Range Block::1 bo:9 Volume offset:521 Expected:4 Got:1
59565 Shift 0, v:1 sv:1 min:2 cur:4
59566 Out of Range Block::1 bo:10 Volume offset:522 Expected:4 Got:1
59567 Shift 0, v:1 sv:1 min:2 cur:4
59568 Out of Range Block::1 bo:11 Volume offset:523 Expected:4 Got:1
59569 Shift 0, v:1 sv:1 min:2 cur:4
59570 Out of Range Block::1 bo:12 Volume offset:524 Expected:4 Got:1
59571 Shift 0, v:1 sv:1 min:2 cur:4
59572 Out of Range Block::1 bo:13 Volume offset:525 Expected:4 Got:1
59573 Shift 0, v:1 sv:1 min:2 cur:4
59574 Out of Range Block::1 bo:14 Volume offset:526 Expected:4 Got:1
59575 Shift 0, v:1 sv:1 min:2 cur:4
59576 Out of Range Block::1 bo:15 Volume offset:527 Expected:4 Got:1
59577 Shift 0, v:1 sv:1 min:2 cur:4
59578 Out of Range Block::1 bo:16 Volume offset:528 Expected:4 Got:1
59579 Shift 0, v:1 sv:1 min:2 cur:4
59580 Out of Range Block::1 bo:17 Volume offset:529 Expected:4 Got:1
59581 Shift 0, v:1 sv:1 min:2 cur:4
59582 Out of Range Block::1 bo:18 Volume offset:530 Expected:4 Got:1
59583 Shift 0, v:1 sv:1 min:2 cur:4
59584 Out of Range Block::1 bo:19 Volume offset:531 Expected:4 Got:1
59585 Shift 0, v:1 sv:1 min:2 cur:4
59586 Out of Range Block::1 bo:20 Volume offset:532 Expected:4 Got:1
59587 Shift 0, v:1 sv:1 min:2 cur:4
59588 Out of Range Block::1 bo:21 Volume offset:533 Expected:4 Got:1
59589 Shift 0, v:1 sv:1 min:2 cur:4
59590 Out of Range Block::1 bo:22 Volume offset:534 Expected:4 Got:1
59591 Shift 0, v:1 sv:1 min:2 cur:4
59592 Out of Range Block::1 bo:23 Volume offset:535 Expected:4 Got:1
59593 Shift 0, v:1 sv:1 min:2 cur:4
59594 Out of Range Block::1 bo:24 Volume offset:536 Expected:4 Got:1
59595 Shift 0, v:1 sv:1 min:2 cur:4
59596 Out of Range Block::1 bo:25 Volume offset:537 Expected:4 Got:1
59597 Shift 0, v:1 sv:1 min:2 cur:4
59598 Out of Range Block::1 bo:26 Volume offset:538 Expected:4 Got:1
59599 Shift 0, v:1 sv:1 min:2 cur:4
59600 Out of Range Block::1 bo:27 Volume offset:539 Expected:4 Got:1
59601 Shift 0, v:1 sv:1 min:2 cur:4
59602 Out of Range Block::1 bo:28 Volume offset:540 Expected:4 Got:1
59603 Shift 0, v:1 sv:1 min:2 cur:4
59604 Out of Range Block::1 bo:29 Volume offset:541 Expected:4 Got:1
59605 Shift 0, v:1 sv:1 min:2 cur:4
59606 Out of Range Block::1 bo:30 Volume offset:542 Expected:4 Got:1
59607 Shift 0, v:1 sv:1 min:2 cur:4
59608 Out of Range Block::1 bo:31 Volume offset:543 Expected:4 Got:1
59609 Shift 0, v:1 sv:1 min:2 cur:4
59610 Out of Range Block::1 bo:32 Volume offset:544 Expected:4 Got:1
59611 Shift 0, v:1 sv:1 min:2 cur:4
59612 Out of Range Block::1 bo:33 Volume offset:545 Expected:4 Got:1
59613 Shift 0, v:1 sv:1 min:2 cur:4
59614 Out of Range Block::1 bo:34 Volume offset:546 Expected:4 Got:1
59615 Shift 0, v:1 sv:1 min:2 cur:4
59616 Out of Range Block::1 bo:35 Volume offset:547 Expected:4 Got:1
59617 Shift 0, v:1 sv:1 min:2 cur:4
59618 Out of Range Block::1 bo:36 Volume offset:548 Expected:4 Got:1
59619 Shift 0, v:1 sv:1 min:2 cur:4
59620 Out of Range Block::1 bo:37 Volume offset:549 Expected:4 Got:1
59621 Shift 0, v:1 sv:1 min:2 cur:4
59622 Out of Range Block::1 bo:38 Volume offset:550 Expected:4 Got:1
59623 Shift 0, v:1 sv:1 min:2 cur:4
59624 Out of Range Block::1 bo:39 Volume offset:551 Expected:4 Got:1
59625 Shift 0, v:1 sv:1 min:2 cur:4
59626 Out of Range Block::1 bo:40 Volume offset:552 Expected:4 Got:1
59627 Shift 0, v:1 sv:1 min:2 cur:4
59628 Out of Range Block::1 bo:41 Volume offset:553 Expected:4 Got:1
59629 Shift 0, v:1 sv:1 min:2 cur:4
59630 Out of Range Block::1 bo:42 Volume offset:554 Expected:4 Got:1
59631 Shift 0, v:1 sv:1 min:2 cur:4
59632 Out of Range Block::1 bo:43 Volume offset:555 Expected:4 Got:1
59633 Shift 0, v:1 sv:1 min:2 cur:4
59634 Out of Range Block::1 bo:44 Volume offset:556 Expected:4 Got:1
59635 Shift 0, v:1 sv:1 min:2 cur:4
59636 Out of Range Block::1 bo:45 Volume offset:557 Expected:4 Got:1
59637 Shift 0, v:1 sv:1 min:2 cur:4
59638 Out of Range Block::1 bo:46 Volume offset:558 Expected:4 Got:1
59639 Shift 0, v:1 sv:1 min:2 cur:4
59640 Out of Range Block::1 bo:47 Volume offset:559 Expected:4 Got:1
59641 Shift 0, v:1 sv:1 min:2 cur:4
59642 Out of Range Block::1 bo:48 Volume offset:560 Expected:4 Got:1
59643 Shift 0, v:1 sv:1 min:2 cur:4
59644 Out of Range Block::1 bo:49 Volume offset:561 Expected:4 Got:1
59645 Shift 0, v:1 sv:1 min:2 cur:4
59646 Out of Range Block::1 bo:50 Volume offset:562 Expected:4 Got:1
59647 Shift 0, v:1 sv:1 min:2 cur:4
59648 Out of Range Block::1 bo:51 Volume offset:563 Expected:4 Got:1
59649 Shift 0, v:1 sv:1 min:2 cur:4
59650 Out of Range Block::1 bo:52 Volume offset:564 Expected:4 Got:1
59651 Shift 0, v:1 sv:1 min:2 cur:4
59652 Out of Range Block::1 bo:53 Volume offset:565 Expected:4 Got:1
59653 Shift 0, v:1 sv:1 min:2 cur:4
59654 Out of Range Block::1 bo:54 Volume offset:566 Expected:4 Got:1
59655 Shift 0, v:1 sv:1 min:2 cur:4
59656 Out of Range Block::1 bo:55 Volume offset:567 Expected:4 Got:1
59657 Shift 0, v:1 sv:1 min:2 cur:4
59658 Out of Range Block::1 bo:56 Volume offset:568 Expected:4 Got:1
59659 Shift 0, v:1 sv:1 min:2 cur:4
59660 Out of Range Block::1 bo:57 Volume offset:569 Expected:4 Got:1
59661 Shift 0, v:1 sv:1 min:2 cur:4
59662 Out of Range Block::1 bo:58 Volume offset:570 Expected:4 Got:1
59663 Shift 0, v:1 sv:1 min:2 cur:4
59664 Out of Range Block::1 bo:59 Volume offset:571 Expected:4 Got:1
59665 Shift 0, v:1 sv:1 min:2 cur:4
59666 Out of Range Block::1 bo:60 Volume offset:572 Expected:4 Got:1
59667 Shift 0, v:1 sv:1 min:2 cur:4
59668 Out of Range Block::1 bo:61 Volume offset:573 Expected:4 Got:1
59669 Shift 0, v:1 sv:1 min:2 cur:4
59670 Out of Range Block::1 bo:62 Volume offset:574 Expected:4 Got:1
59671 Shift 0, v:1 sv:1 min:2 cur:4
59672 Out of Range Block::1 bo:63 Volume offset:575 Expected:4 Got:1
59673 Shift 0, v:1 sv:1 min:2 cur:4
59674 Out of Range Block::1 bo:64 Volume offset:576 Expected:4 Got:1
59675 Shift 0, v:1 sv:1 min:2 cur:4
59676 Out of Range Block::1 bo:65 Volume offset:577 Expected:4 Got:1
59677 Shift 0, v:1 sv:1 min:2 cur:4
59678 Out of Range Block::1 bo:66 Volume offset:578 Expected:4 Got:1
59679 Shift 0, v:1 sv:1 min:2 cur:4
59680 Out of Range Block::1 bo:67 Volume offset:579 Expected:4 Got:1
59681 Shift 0, v:1 sv:1 min:2 cur:4
59682 Out of Range Block::1 bo:68 Volume offset:580 Expected:4 Got:1
59683 Shift 0, v:1 sv:1 min:2 cur:4
59684 Out of Range Block::1 bo:69 Volume offset:581 Expected:4 Got:1
59685 Shift 0, v:1 sv:1 min:2 cur:4
59686 Out of Range Block::1 bo:70 Volume offset:582 Expected:4 Got:1
59687 Shift 0, v:1 sv:1 min:2 cur:4
59688 Out of Range Block::1 bo:71 Volume offset:583 Expected:4 Got:1
59689 Shift 0, v:1 sv:1 min:2 cur:4
59690 Out of Range Block::1 bo:72 Volume offset:584 Expected:4 Got:1
59691 Shift 0, v:1 sv:1 min:2 cur:4
59692 Out of Range Block::1 bo:73 Volume offset:585 Expected:4 Got:1
59693 Shift 0, v:1 sv:1 min:2 cur:4
59694 Out of Range Block::1 bo:74 Volume offset:586 Expected:4 Got:1
59695 Shift 0, v:1 sv:1 min:2 cur:4
59696 Out of Range Block::1 bo:75 Volume offset:587 Expected:4 Got:1
59697 Shift 0, v:1 sv:1 min:2 cur:4
59698 Out of Range Block::1 bo:76 Volume offset:588 Expected:4 Got:1
59699 Shift 0, v:1 sv:1 min:2 cur:4
59700 Out of Range Block::1 bo:77 Volume offset:589 Expected:4 Got:1
59701 Shift 0, v:1 sv:1 min:2 cur:4
59702 Out of Range Block::1 bo:78 Volume offset:590 Expected:4 Got:1
59703 Shift 0, v:1 sv:1 min:2 cur:4
59704 Out of Range Block::1 bo:79 Volume offset:591 Expected:4 Got:1
59705 Shift 0, v:1 sv:1 min:2 cur:4
59706 Out of Range Block::1 bo:80 Volume offset:592 Expected:4 Got:1
59707 Shift 0, v:1 sv:1 min:2 cur:4
59708 Out of Range Block::1 bo:81 Volume offset:593 Expected:4 Got:1
59709 Shift 0, v:1 sv:1 min:2 cur:4
59710 Out of Range Block::1 bo:82 Volume offset:594 Expected:4 Got:1
59711 Shift 0, v:1 sv:1 min:2 cur:4
59712 Out of Range Block::1 bo:83 Volume offset:595 Expected:4 Got:1
59713 Shift 0, v:1 sv:1 min:2 cur:4
59714 Out of Range Block::1 bo:84 Volume offset:596 Expected:4 Got:1
59715 Shift 0, v:1 sv:1 min:2 cur:4
59716 Out of Range Block::1 bo:85 Volume offset:597 Expected:4 Got:1
59717 Shift 0, v:1 sv:1 min:2 cur:4
59718 Out of Range Block::1 bo:86 Volume offset:598 Expected:4 Got:1
59719 Shift 0, v:1 sv:1 min:2 cur:4
59720 Out of Range Block::1 bo:87 Volume offset:599 Expected:4 Got:1
59721 Shift 0, v:1 sv:1 min:2 cur:4
59722 Out of Range Block::1 bo:88 Volume offset:600 Expected:4 Got:1
59723 Shift 0, v:1 sv:1 min:2 cur:4
59724 Out of Range Block::1 bo:89 Volume offset:601 Expected:4 Got:1
59725 Shift 0, v:1 sv:1 min:2 cur:4
59726 Out of Range Block::1 bo:90 Volume offset:602 Expected:4 Got:1
59727 Shift 0, v:1 sv:1 min:2 cur:4
59728 Out of Range Block::1 bo:91 Volume offset:603 Expected:4 Got:1
59729 Shift 0, v:1 sv:1 min:2 cur:4
59730 Out of Range Block::1 bo:92 Volume offset:604 Expected:4 Got:1
59731 Shift 0, v:1 sv:1 min:2 cur:4
59732 Out of Range Block::1 bo:93 Volume offset:605 Expected:4 Got:1
59733 Shift 0, v:1 sv:1 min:2 cur:4
59734 Out of Range Block::1 bo:94 Volume offset:606 Expected:4 Got:1
59735 Shift 0, v:1 sv:1 min:2 cur:4
59736 Out of Range Block::1 bo:95 Volume offset:607 Expected:4 Got:1
59737 Shift 0, v:1 sv:1 min:2 cur:4
59738 Out of Range Block::1 bo:96 Volume offset:608 Expected:4 Got:1
59739 Shift 0, v:1 sv:1 min:2 cur:4
59740 Out of Range Block::1 bo:97 Volume offset:609 Expected:4 Got:1
59741 Shift 0, v:1 sv:1 min:2 cur:4
59742 Out of Range Block::1 bo:98 Volume offset:610 Expected:4 Got:1
59743 Shift 0, v:1 sv:1 min:2 cur:4
59744 Out of Range Block::1 bo:99 Volume offset:611 Expected:4 Got:1
59745 Shift 0, v:1 sv:1 min:2 cur:4
59746 Out of Range Block::1 bo:100 Volume offset:612 Expected:4 Got:1
59747 Shift 0, v:1 sv:1 min:2 cur:4
59748 Out of Range Block::1 bo:101 Volume offset:613 Expected:4 Got:1
59749 Shift 0, v:1 sv:1 min:2 cur:4
59750 Out of Range Block::1 bo:102 Volume offset:614 Expected:4 Got:1
59751 Shift 0, v:1 sv:1 min:2 cur:4
59752 Out of Range Block::1 bo:103 Volume offset:615 Expected:4 Got:1
59753 Shift 0, v:1 sv:1 min:2 cur:4
59754 Out of Range Block::1 bo:104 Volume offset:616 Expected:4 Got:1
59755 Shift 0, v:1 sv:1 min:2 cur:4
59756 Out of Range Block::1 bo:105 Volume offset:617 Expected:4 Got:1
59757 Shift 0, v:1 sv:1 min:2 cur:4
59758 Out of Range Block::1 bo:106 Volume offset:618 Expected:4 Got:1
59759 Shift 0, v:1 sv:1 min:2 cur:4
59760 Out of Range Block::1 bo:107 Volume offset:619 Expected:4 Got:1
59761 Shift 0, v:1 sv:1 min:2 cur:4
59762 Out of Range Block::1 bo:108 Volume offset:620 Expected:4 Got:1
59763 Shift 0, v:1 sv:1 min:2 cur:4
59764 Out of Range Block::1 bo:109 Volume offset:621 Expected:4 Got:1
59765 Shift 0, v:1 sv:1 min:2 cur:4
59766 Out of Range Block::1 bo:110 Volume offset:622 Expected:4 Got:1
59767 Shift 0, v:1 sv:1 min:2 cur:4
59768 Out of Range Block::1 bo:111 Volume offset:623 Expected:4 Got:1
59769 Shift 0, v:1 sv:1 min:2 cur:4
59770 Out of Range Block::1 bo:112 Volume offset:624 Expected:4 Got:1
59771 Shift 0, v:1 sv:1 min:2 cur:4
59772 Out of Range Block::1 bo:113 Volume offset:625 Expected:4 Got:1
59773 Shift 0, v:1 sv:1 min:2 cur:4
59774 Out of Range Block::1 bo:114 Volume offset:626 Expected:4 Got:1
59775 Shift 0, v:1 sv:1 min:2 cur:4
59776 Out of Range Block::1 bo:115 Volume offset:627 Expected:4 Got:1
59777 Shift 0, v:1 sv:1 min:2 cur:4
59778 Out of Range Block::1 bo:116 Volume offset:628 Expected:4 Got:1
59779 Shift 0, v:1 sv:1 min:2 cur:4
59780 Out of Range Block::1 bo:117 Volume offset:629 Expected:4 Got:1
59781 Shift 0, v:1 sv:1 min:2 cur:4
59782 Out of Range Block::1 bo:118 Volume offset:630 Expected:4 Got:1
59783 Shift 0, v:1 sv:1 min:2 cur:4
59784 Out of Range Block::1 bo:119 Volume offset:631 Expected:4 Got:1
59785 Shift 0, v:1 sv:1 min:2 cur:4
59786 Out of Range Block::1 bo:120 Volume offset:632 Expected:4 Got:1
59787 Shift 0, v:1 sv:1 min:2 cur:4
59788 Out of Range Block::1 bo:121 Volume offset:633 Expected:4 Got:1
59789 Shift 0, v:1 sv:1 min:2 cur:4
59790 Out of Range Block::1 bo:122 Volume offset:634 Expected:4 Got:1
59791 Shift 0, v:1 sv:1 min:2 cur:4
59792 Out of Range Block::1 bo:123 Volume offset:635 Expected:4 Got:1
59793 Shift 0, v:1 sv:1 min:2 cur:4
59794 Out of Range Block::1 bo:124 Volume offset:636 Expected:4 Got:1
59795 Shift 0, v:1 sv:1 min:2 cur:4
59796 Out of Range Block::1 bo:125 Volume offset:637 Expected:4 Got:1
59797 Shift 0, v:1 sv:1 min:2 cur:4
59798 Out of Range Block::1 bo:126 Volume offset:638 Expected:4 Got:1
59799 Shift 0, v:1 sv:1 min:2 cur:4
59800 Out of Range Block::1 bo:127 Volume offset:639 Expected:4 Got:1
59801 Shift 0, v:1 sv:1 min:2 cur:4
59802 okOut of Range Block::1 bo:128 Volume offset:640 Expected:4 Got:1
59803 Shift 0, v:1 sv:1 min:2 cur:4
59804 Out of Range Block::1 bo:129 Volume offset:641 Expected:4 Got:1
59805 Shift 0, v:1 sv:1 min:2 cur:4
59806 Out of Range Block::1 bo:130 Volume offset:642 Expected:4 Got:1
59807 Shift 0, v:1 sv:1 min:2 cur:4
59808 Out of Range Block::1 bo:131 Volume offset:643 Expected:4 Got:1
59809 Shift 0, v:1 sv:1 min:2 cur:4
59810 Out of Range Block::1 bo:132 Volume offset:644 Expected:4 Got:1
59811 Shift 0, v:1 sv:1 min:2 cur:4
59812 Out of Range Block::1 bo:133 Volume offset:645 Expected:4 Got:1
59813 Shift 0, v:1 sv:1 min:2 cur:4
59814 Out of Range Block::1 bo:134 Volume offset:646 Expected:4 Got:1
59815 Shift 0, v:1 sv:1 min:2 cur:4
59816 Out of Range Block::1 bo:135 Volume offset:647 Expected:4 Got:1
59817 Shift 0, v:1 sv:1 min:2 cur:4
59818 Out of Range Block::1 bo:136 Volume offset:648 Expected:4 Got:1
59819 Shift 0, v:1 sv:1 min:2 cur:4
59820 Out of Range Block::1 bo:137 Volume offset:649 Expected:4 Got:1
59821 Shift 0, v:1 sv:1 min:2 cur:4
59822 Out of Range Block::1 bo:138 Volume offset:650 Expected:4 Got:1
59823 Shift 0, v:1 sv:1 min:2 cur:4
59824 Out of Range Block::1 bo:139 Volume offset:651 Expected:4 Got:1
59825 Shift 0, v:1 sv:1 min:2 cur:4
59826 Out of Range Block::1 bo:140 Volume offset:652 Expected:4 Got:1
59827 Shift 0, v:1 sv:1 min:2 cur:4
59828 Out of Range Block::1 bo:141 Volume offset:653 Expected:4 Got:1
59829 Shift 0, v:1 sv:1 min:2 cur:4
59830 Out of Range Block::1 bo:142 Volume offset:654 Expected:4 Got:1
59831 Shift 0, v:1 sv:1 min:2 cur:4
59832 Out of Range Block::1 bo:143 Volume offset:655 Expected:4 Got:1
59833 Shift 0, v:1 sv:1 min:2 cur:4
59834 Out of Range Block::1 bo:144 Volume offset:656 Expected:4 Got:1
59835 Shift 0, v:1 sv:1 min:2 cur:4
59836 Out of Range Block::1 bo:145 Volume offset:657 Expected:4 Got:1
59837 Shift 0, v:1 sv:1 min:2 cur:4
59838 Out of Range Block::1 bo:146 Volume offset:658 Expected:4 Got:1
59839 Shift 0, v:1 sv:1 min:2 cur:4
59840 Out of Range Block::1 bo:147 Volume offset:659 Expected:4 Got:1
59841 Shift 0, v:1 sv:1 min:2 cur:4
59842 Out of Range Block::1 bo:148 Volume offset:660 Expected:4 Got:1
59843 Shift 0, v:1 sv:1 min:2 cur:4
59844 Out of Range Block::1 bo:149 Volume offset:661 Expected:4 Got:1
59845 Shift 0, v:1 sv:1 min:2 cur:4
59846 Out of Range Block::1 bo:150 Volume offset:662 Expected:4 Got:1
59847 Shift 0, v:1 sv:1 min:2 cur:4
59848 Out of Range Block::1 bo:151 Volume offset:663 Expected:4 Got:1
59849 Shift 0, v:1 sv:1 min:2 cur:4
59850 Out of Range Block::1 bo:152 Volume offset:664 Expected:4 Got:1
59851 Shift 0, v:1 sv:1 min:2 cur:4
59852 Out of Range Block::1 bo:153 Volume offset:665 Expected:4 Got:1
59853 Shift 0, v:1 sv:1 min:2 cur:4
59854 Out of Range Block::1 bo:154 Volume offset:666 Expected:4 Got:1
59855 Shift 0, v:1 sv:1 min:2 cur:4
59856 Out of Range Block::1 bo:155 Volume offset:667 Expected:4 Got:1
59857 Shift 0, v:1 sv:1 min:2 cur:4
59858 Out of Range Block::1 bo:156 Volume offset:668 Expected:4 Got:1
59859 Shift 0, v:1 sv:1 min:2 cur:4
59860 Out of Range Block::1 bo:157 Volume offset:669 Expected:4 Got:1
59861 Shift 0, v:1 sv:1 min:2 cur:4
59862 Out of Range Block::1 bo:158 Volume offset:670 Expected:4 Got:1
59863 Shift 0, v:1 sv:1 min:2 cur:4
59864 Out of Range Block::1 bo:159 Volume offset:671 Expected:4 Got:1
59865 Shift 0, v:1 sv:1 min:2 cur:4
59866 Out of Range Block::1 bo:160 Volume offset:672 Expected:4 Got:1
59867 Shift 0, v:1 sv:1 min:2 cur:4
59868 Out of Range Block::1 bo:161 Volume offset:673 Expected:4 Got:1
59869 Shift 0, v:1 sv:1 min:2 cur:4
59870 Out of Range Block::1 bo:162 Volume offset:674 Expected:4 Got:1
59871 Shift 0, v:1 sv:1 min:2 cur:4
59872 Out of Range Block::1 bo:163 Volume offset:675 Expected:4 Got:1
59873 Shift 0, v:1 sv:1 min:2 cur:4
59874 Out of Range Block::1 bo:164 Volume offset:676 Expected:4 Got:1
59875 Shift 0, v:1 sv:1 min:2 cur:4
59876 Out of Range Block::1 bo:165 Volume offset:677 Expected:4 Got:1
59877 Shift 0, v:1 sv:1 min:2 cur:4
59878 
59879 Out of Range Block::1 bo:166 Volume offset:678 Expected:4 Got:1
59880 Shift 0, v:1 sv:1 min:2 cur:4
59881 Out of Range Block::1 bo:167 Volume offset:679 Expected:4 Got:1
59882 Shift 0, v:1 sv:1 min:2 cur:4
59883 Out of Range Block::1 bo:168 Volume offset:680 Expected:4 Got:1
59884 Shift 0, v:1 sv:1 min:2 cur:4
59885 Out of Range Block::1 bo:169 Volume offset:681 Expected:4 Got:1
59886 Shift 0, v:1 sv:1 min:2 cur:4
59887 Out of Range Block::1 bo:170 Volume offset:682 Expected:4 Got:1
59888 Shift 0, v:1 sv:1 min:2 cur:4
59889 Out of Range Block::1 bo:171 Volume offset:683 Expected:4 Got:1
59890 Shift 0, v:1 sv:1 min:2 cur:4
59891 Out of Range Block::1 bo:172 Volume offset:684 Expected:4 Got:1
59892 Shift 0, v:1 sv:1 min:2 cur:4
59893 Out of Range Block::1 bo:173 Volume offset:685 Expected:4 Got:1
59894 Shift 0, v:1 sv:1 min:2 cur:4
59895 Out of Range Block::1 bo:174 Volume offset:686 Expected:4 Got:1
59896 Shift 0, v:1 sv:1 min:2 cur:4
59897 Out of Range Block::1 bo:175 Volume offset:687 Expected:4 Got:1
59898 Shift 0, v:1 sv:1 min:2 cur:4
59899 Out of Range Block::1 bo:176 Volume offset:688 Expected:4 Got:1
59900 Shift 0, v:1 sv:1 min:2 cur:4
59901 Out of Range Block::1 bo:177 Volume offset:689 Expected:4 Got:1
59902 Shift 0, v:1 sv:1 min:2 cur:4
59903 Out of Range Block::1 bo:178 Volume offset:690 Expected:4 Got:1
59904 Shift 0, v:1 sv:1 min:2 cur:4
59905 Out of Range Block::1 bo:179 Volume offset:691 Expected:4 Got:1
59906 Shift 0, v:1 sv:1 min:2 cur:4
59907 Out of Range Block::1 bo:180 Volume offset:692 Expected:4 Got:1
59908 Shift 0, v:1 sv:1 min:2 cur:4
59909 Out of Range Block::1 bo:181 Volume offset:693 Expected:4 Got:1
59910 Shift 0, v:1 sv:1 min:2 cur:4
59911 Out of Range Block::1 bo:182 Volume offset:694 Expected:4 Got:1
59912 Shift 0, v:1 sv:1 min:2 cur:4
59913 Out of Range Block::1 bo:183 Volume offset:695 Expected:4 Got:1
59914 Shift 0, v:1 sv:1 min:2 cur:4
59915 Out of Range Block::1 bo:184 Volume offset:696 Expected:4 Got:1
59916 Shift 0, v:1 sv:1 min:2 cur:4
59917 Out of Range Block::1 bo:185 Volume offset:697 Expected:4 Got:1
59918 Shift 0, v:1 sv:1 min:2 cur:4
59919 Out of Range Block::1 bo:186 Volume offset:698 Expected:4 Got:1
59920 Shift 0, v:1 sv:1 min:2 cur:4
59921 Out of Range Block::1 bo:187 Volume offset:699 Expected:4 Got:1
59922 Shift 0, v:1 sv:1 min:2 cur:4
59923 Out of Range Block::1 bo:188 Volume offset:700 Expected:4 Got:1
59924 Shift 0, v:1 sv:1 min:2 cur:4
59925 Out of Range Block::1 bo:189 Volume offset:701 Expected:4 Got:1
59926 Shift 0, v:1 sv:1 min:2 cur:4
59927 Out of Range Block::1 bo:190 Volume offset:702 Expected:4 Got:1
59928 Shift 0, v:1 sv:1 min:2 cur:4
59929 Out of Range Block::1 bo:191 Volume offset:703 Expected:4 Got:1
59930 Shift 0, v:1 sv:1 min:2 cur:4
59931 Out of Range Block::1 bo:192 Volume offset:704 Expected:4 Got:1
59932 Shift 0, v:1 sv:1 min:2 cur:4
59933 Out of Range Block::1 bo:193 Volume offset:705 Expected:4 Got:1
59934 Shift 0, v:1 sv:1 min:2 cur:4
59935 Out of Range Block::1 bo:194 Volume offset:706 Expected:4 Got:1
59936 Shift 0, v:1 sv:1 min:2 cur:4
59937 Out of Range Block::1 bo:195 Volume offset:707 Expected:4 Got:1
59938 Shift 0, v:1 sv:1 min:2 cur:4
59939 Out of Range Block::1 bo:196 Volume offset:708 Expected:4 Got:1
59940 Shift 0, v:1 sv:1 min:2 cur:4
59941 Out of Range Block::1 bo:197 Volume offset:709 Expected:4 Got:1
59942 Shift 0, v:1 sv:1 min:2 cur:4
59943 Out of Range Block::1 bo:198 Volume offset:710 Expected:4 Got:1
59944 Shift 0, v:1 sv:1 min:2 cur:4
59945 Out of Range Block::1 bo:199 Volume offset:711 Expected:4 Got:1
59946 Shift 0, v:1 sv:1 min:2 cur:4
59947 Out of Range Block::1 bo:200 Volume offset:712 Expected:4 Got:1
59948 Shift 0, v:1 sv:1 min:2 cur:4
59949 Out of Range Block::1 bo:201 Volume offset:713 Expected:4 Got:1
59950 Shift 0, v:1 sv:1 min:2 cur:4
59951 Out of Range Block::1 bo:202 Volume offset:714 Expected:4 Got:1
59952 Shift 0, v:1 sv:1 min:2 cur:4
59953 Out of Range Block::1 bo:203 Volume offset:715 Expected:4 Got:1
59954 Shift 0, v:1 sv:1 min:2 cur:4
59955 Out of Range Block::1 bo:204 Volume offset:716 Expected:4 Got:1
59956 Shift 0, v:1 sv:1 min:2 cur:4
59957 Out of Range Block::1 bo:205 Volume offset:717 Expected:4 Got:1
59958 Shift 0, v:1 sv:1 min:2 cur:4
59959 Out of Range Block::1 bo:206 Volume offset:718 Expected:4 Got:1
59960 Shift 0, v:1 sv:1 min:2 cur:4
59961 Out of Range Block::1 bo:207 Volume offset:719 Expected:4 Got:1
59962 Shift 0, v:1 sv:1 min:2 cur:4
59963 Out of Range Block::1 bo:208 Volume offset:720 Expected:4 Got:1
59964 Shift 0, v:1 sv:1 min:2 cur:4
59965 Out of Range Block::1 bo:209 Volume offset:721 Expected:4 Got:1
59966 Shift 0, v:1 sv:1 min:2 cur:4
59967 Out of Range Block::1 bo:210 Volume offset:722 Expected:4 Got:1
59968 Shift 0, v:1 sv:1 min:2 cur:4
59969 Out of Range Block::1 bo:211 Volume offset:723 Expected:4 Got:1
59970 Shift 0, v:1 sv:1 min:2 cur:4
59971 Out of Range Block::1 bo:212 Volume offset:724 Expected:4 Got:1
59972 Shift 0, v:1 sv:1 min:2 cur:4
59973 Out of Range Block::1 bo:213 Volume offset:725 Expected:4 Got:1
59974 Shift 0, v:1 sv:1 min:2 cur:4
59975 Out of Range Block::1 bo:214 Volume offset:726 Expected:4 Got:1
59976 Shift 0, v:1 sv:1 min:2 cur:4
59977 Out of Range Block::1 bo:215 Volume offset:727 Expected:4 Got:1
59978 Shift 0, v:1 sv:1 min:2 cur:4
59979 Out of Range Block::1 bo:216 Volume offset:728 Expected:4 Got:1
59980 Shift 0, v:1 sv:1 min:2 cur:4
59981 Out of Range Block::1 bo:217 Volume offset:729 Expected:4 Got:1
59982 Shift 0, v:1 sv:1 min:2 cur:4
59983 Out of Range Block::1 bo:218 Volume offset:730 Expected:4 Got:1
59984 Shift 0, v:1 sv:1 min:2 cur:4
59985 Out of Range Block::1 bo:219 Volume offset:731 Expected:4 Got:1
59986 Shift 0, v:1 sv:1 min:2 cur:4
59987 Out of Range Block::1 bo:220 Volume offset:732 Expected:4 Got:1
59988 Shift 0, v:1 sv:1 min:2 cur:4
59989 Out of Range Block::1 bo:221 Volume offset:733 Expected:4 Got:1
59990 Shift 0, v:1 sv:1 min:2 cur:4
59991 Out of Range Block::1 bo:222 Volume offset:734 Expected:4 Got:1
59992 Shift 0, v:1 sv:1 min:2 cur:4
59993 Out of Range Block::1 bo:223 Volume offset:735 Expected:4 Got:1
59994 Shift 0, v:1 sv:1 min:2 cur:4
59995 Out of Range Block::1 bo:224 Volume offset:736 Expected:4 Got:1
59996 Shift 0, v:1 sv:1 min:2 cur:4
59997 Out of Range Block::1 bo:225 Volume offset:737 Expected:4 Got:1
59998 Shift 0, v:1 sv:1 min:2 cur:4
59999 Out of Range Block::1 bo:226 Volume offset:738 Expected:4 Got:1
60000 Shift 0, v:1 sv:1 min:2 cur:4
60001 Out of Range Block::1 bo:227 Volume offset:739 Expected:4 Got:1
60002 Shift 0, v:1 sv:1 min:2 cur:4
60003 Out of Range Block::1 bo:228 Volume offset:740 Expected:4 Got:1
60004 Shift 0, v:1 sv:1 min:2 cur:4
60005 Out of Range Block::1 bo:229 Volume offset:741 Expected:4 Got:1
60006 Shift 0, v:1 sv:1 min:2 cur:4
60007 Out of Range Block::1 bo:230 Volume offset:742 Expected:4 Got:1
60008 Shift 0, v:1 sv:1 min:2 cur:4
60009 Out of Range Block::1 bo:231 Volume offset:743 Expected:4 Got:1
60010 Shift 0, v:1 sv:1 min:2 cur:4
60011 Out of Range Block::1 bo:232 Volume offset:744 Expected:4 Got:1
60012 Shift 0, v:1 sv:1 min:2 cur:4
60013 Out of Range Block::1 bo:233 Volume offset:745 Expected:4 Got:1
60014 Shift 0, v:1 sv:1 min:2 cur:4
60015 Out of Range Block::1 bo:234 Volume offset:746 Expected:4 Got:1
60016 Shift 0, v:1 sv:1 min:2 cur:4
60017 Out of Range Block::1 bo:235 Volume offset:747 Expected:4 Got:1
60018 Shift 0, v:1 sv:1 min:2 cur:4
60019 Out of Range Block::1 bo:236 Volume offset:748 Expected:4 Got:1
60020 Shift 0, v:1 sv:1 min:2 cur:4
60021 Out of Range Block::1 bo:237 Volume offset:749 Expected:4 Got:1
60022 test test::test_wl_commit_range_update_rollover_below ... Shift 0, v:1 sv:1 min:2 cur:4
60023 Out of Range Block::1 bo:238 Volume offset:750 Expected:4 Got:1
60024 Shift 0, v:1 sv:1 min:2 cur:4
60025 Out of Range Block::1 bo:239 Volume offset:751 Expected:4 Got:1
60026 Shift 0, v:1 sv:1 min:2 cur:4
60027 Out of Range Block::1 bo:240 Volume offset:752 Expected:4 Got:1
60028 Shift 0, v:1 sv:1 min:2 cur:4
60029 Out of Range Block::1 bo:241 Volume offset:753 Expected:4 Got:1
60030 Shift 0, v:1 sv:1 min:2 cur:4
60031 Out of Range Block::1 bo:242 Volume offset:754 Expected:4 Got:1
60032 Shift 0, v:1 sv:1 min:2 cur:4
60033 Out of Range Block::1 bo:243 Volume offset:755 Expected:4 Got:1
60034 Shift 0, v:1 sv:1 min:2 cur:4
60035 Out of Range Block::1 bo:244 Volume offset:756 Expected:4 Got:1
60036 Shift 0, v:1 sv:1 min:2 cur:4
60037 Out of Range Block::1 bo:245 Volume offset:757 Expected:4 Got:1
60038 Shift 0, v:1 sv:1 min:2 cur:4
60039 Out of Range Block::1 bo:246 Volume offset:758 Expected:4 Got:1
60040 Shift 0, v:1 sv:1 min:2 cur:4
60041 Out of Range Block::1 bo:247 Volume offset:759 Expected:4 Got:1
60042 Shift 0, v:1 sv:1 min:2 cur:4
60043 Out of Range Block::1 bo:248 Volume offset:760 Expected:4 Got:1
60044 Shift 0, v:1 sv:1 min:2 cur:4
60045 Out of Range Block::1 bo:249 Volume offset:761 Expected:4 Got:1
60046 Shift 0, v:1 sv:1 min:2 cur:4
60047 Out of Range Block::1 bo:250 Volume offset:762 Expected:4 Got:1
60048 Shift 0, v:1 sv:1 min:2 cur:4
60049 Out of Range Block::1 bo:251 Volume offset:763 Expected:4 Got:1
60050 Shift 0, v:1 sv:1 min:2 cur:4
60051 Out of Range Block::1 bo:252 Volume offset:764 Expected:4 Got:1
60052 Shift 0, v:1 sv:1 min:2 cur:4
60053 Out of Range Block::1 bo:253 Volume offset:765 Expected:4 Got:1
60054 Shift 0, v:1 sv:1 min:2 cur:4
60055 Out of Range Block::1 bo:254 Volume offset:766 Expected:4 Got:1
60056 okShift 0, v:1 sv:1 min:2 cur:4
60057 Out of Range Block::1 bo:255 Volume offset:767 Expected:4 Got:1
60058 Shift 0, v:1 sv:1 min:2 cur:4
60059 Out of Range Block::1 bo:256 Volume offset:768 Expected:4 Got:1
60060 Shift 0, v:1 sv:1 min:2 cur:4
60061 Out of Range Block::1 bo:257 Volume offset:769 Expected:4 Got:1
60062 Shift 0, v:1 sv:1 min:2 cur:4
60063 Out of Range Block::1 bo:258 Volume offset:770 Expected:4 Got:1
60064 Shift 0, v:1 sv:1 min:2 cur:4
60065 Out of Range Block::1 bo:259 Volume offset:771 Expected:4 Got:1
60066 Shift 0, v:1 sv:1 min:2 cur:4
60067 Out of Range Block::1 bo:260 Volume offset:772 Expected:4 Got:1
60068 Shift 0, v:1 sv:1 min:2 cur:4
60069 Out of Range Block::1 bo:261 Volume offset:773 Expected:4 Got:1
60070 Shift 0, v:1 sv:1 min:2 cur:4
60071 Out of Range Block::1 bo:262 Volume offset:774 Expected:4 Got:1
60072 Shift 0, v:1 sv:1 min:2 cur:4
60073 Out of Range Block::1 bo:263 Volume offset:775 Expected:4 Got:1
60074 Shift 0, v:1 sv:1 min:2 cur:4
60075 Out of Range Block::1 bo:264 Volume offset:776 Expected:4 Got:1
60076 Shift 0, v:1 sv:1 min:2 cur:4
60077 Out of Range Block::1 bo:265 Volume offset:777 Expected:4 Got:1
60078 Shift 0, v:1 sv:1 min:2 cur:4
60079 Out of Range Block::1 bo:266 Volume offset:778 Expected:4 Got:1
60080 Shift 0, v:1 sv:1 min:2 cur:4
60081 Out of Range Block::1 bo:267 Volume offset:779 Expected:4 Got:1
60082 Shift 0, v:1 sv:1 min:2 cur:4
60083 Out of Range Block::1 bo:268 Volume offset:780 Expected:4 Got:1
60084 Shift 0, v:1 sv:1 min:2 cur:4
60085 Out of Range Block::1 bo:269 Volume offset:781 Expected:4 Got:1
60086 Shift 0, v:1 sv:1 min:2 cur:4
60087 Out of Range Block::1 bo:270 Volume offset:782 Expected:4 Got:1
60088 Shift 0, v:1 sv:1 min:2 cur:4
60089 Out of Range Block::1 bo:271 Volume offset:783 Expected:4 Got:1
60090 Shift 0, v:1 sv:1 min:2 cur:4
60091 Out of Range Block::1 bo:272 Volume offset:784 Expected:4 Got:1
60092 Shift 0, v:1 sv:1 min:2 cur:4
60093 Out of Range Block::1 bo:273 Volume offset:785 Expected:4 Got:1
60094 Shift 0, v:1 sv:1 min:2 cur:4
60095 Out of Range Block::1 bo:274 Volume offset:786 Expected:4 Got:1
60096 Shift 0, v:1 sv:1 min:2 cur:4
60097 Out of Range Block::1 bo:275 Volume offset:787 Expected:4 Got:1
60098 Shift 0, v:1 sv:1 min:2 cur:4
60099 Out of Range Block::1 bo:276 Volume offset:788 Expected:4 Got:1
60100 Shift 0, v:1 sv:1 min:2 cur:4
60101 Out of Range Block::1 bo:277 Volume offset:789 Expected:4 Got:1
60102 Shift 0, v:1 sv:1 min:2 cur:4
60103 Out of Range Block::1 bo:278 Volume offset:790 Expected:4 Got:1
60104 Shift 0, v:1 sv:1 min:2 cur:4
60105 Out of Range Block::1 bo:279 Volume offset:791 Expected:4 Got:1
60106 Shift 0, v:1 sv:1 min:2 cur:4
60107 Out of Range Block::1 bo:280 Volume offset:792 Expected:4 Got:1
60108 Shift 0, v:1 sv:1 min:2 cur:4
60109 Out of Range Block::1 bo:281 Volume offset:793 Expected:4 Got:1
60110 Shift 0, v:1 sv:1 min:2 cur:4
60111 Out of Range Block::1 bo:282 Volume offset:794 Expected:4 Got:1
60112 Shift 0, v:1 sv:1 min:2 cur:4
60113 Out of Range Block::1 bo:283 Volume offset:795 Expected:4 Got:1
60114 Shift 0, v:1 sv:1 min:2 cur:4
60115 Out of Range Block::1 bo:284 Volume offset:796 Expected:4 Got:1
60116 Shift 0, v:1 sv:1 min:2 cur:4
60117 Out of Range Block::1 bo:285 Volume offset:797 Expected:4 Got:1
60118 
60119 Shift 0, v:1 sv:1 min:2 cur:4
60120 Out of Range Block::1 bo:286 Volume offset:798 Expected:4 Got:1
60121 Shift 0, v:1 sv:1 min:2 cur:4
60122 Out of Range Block::1 bo:287 Volume offset:799 Expected:4 Got:1
60123 Shift 0, v:1 sv:1 min:2 cur:4
60124 Out of Range Block::1 bo:288 Volume offset:800 Expected:4 Got:1
60125 Shift 0, v:1 sv:1 min:2 cur:4
60126 Out of Range Block::1 bo:289 Volume offset:801 Expected:4 Got:1
60127 Shift 0, v:1 sv:1 min:2 cur:4
60128 Out of Range Block::1 bo:290 Volume offset:802 Expected:4 Got:1
60129 Shift 0, v:1 sv:1 min:2 cur:4
60130 Out of Range Block::1 bo:291 Volume offset:803 Expected:4 Got:1
60131 Shift 0, v:1 sv:1 min:2 cur:4
60132 Out of Range Block::1 bo:292 Volume offset:804 Expected:4 Got:1
60133 Shift 0, v:1 sv:1 min:2 cur:4
60134 Out of Range Block::1 bo:293 Volume offset:805 Expected:4 Got:1
60135 Shift 0, v:1 sv:1 min:2 cur:4
60136 Out of Range Block::1 bo:294 Volume offset:806 Expected:4 Got:1
60137 Shift 0, v:1 sv:1 min:2 cur:4
60138 Out of Range Block::1 bo:295 Volume offset:807 Expected:4 Got:1
60139 Shift 0, v:1 sv:1 min:2 cur:4
60140 Out of Range Block::1 bo:296 Volume offset:808 Expected:4 Got:1
60141 Shift 0, v:1 sv:1 min:2 cur:4
60142 Out of Range Block::1 bo:297 Volume offset:809 Expected:4 Got:1
60143 Shift 0, v:1 sv:1 min:2 cur:4
60144 Out of Range Block::1 bo:298 Volume offset:810 Expected:4 Got:1
60145 Shift 0, v:1 sv:1 min:2 cur:4
60146 Out of Range Block::1 bo:299 Volume offset:811 Expected:4 Got:1
60147 Shift 0, v:1 sv:1 min:2 cur:4
60148 Out of Range Block::1 bo:300 Volume offset:812 Expected:4 Got:1
60149 Shift 0, v:1 sv:1 min:2 cur:4
60150 Out of Range Block::1 bo:301 Volume offset:813 Expected:4 Got:1
60151 Shift 0, v:1 sv:1 min:2 cur:4
60152 Out of Range Block::1 bo:302 Volume offset:814 Expected:4 Got:1
60153 Shift 0, v:1 sv:1 min:2 cur:4
60154 Out of Range Block::1 bo:303 Volume offset:815 Expected:4 Got:1
60155 Shift 0, v:1 sv:1 min:2 cur:4
60156 Out of Range Block::1 bo:304 Volume offset:816 Expected:4 Got:1
60157 Shift 0, v:1 sv:1 min:2 cur:4
60158 Out of Range Block::1 bo:305 Volume offset:817 Expected:4 Got:1
60159 Shift 0, v:1 sv:1 min:2 cur:4
60160 Out of Range Block::1 bo:306 Volume offset:818 Expected:4 Got:1
60161 Shift 0, v:1 sv:1 min:2 cur:4
60162 Out of Range Block::1 bo:307 Volume offset:819 Expected:4 Got:1
60163 Shift 0, v:1 sv:1 min:2 cur:4
60164 Out of Range Block::1 bo:308 Volume offset:820 Expected:4 Got:1
60165 Shift 0, v:1 sv:1 min:2 cur:4
60166 Out of Range Block::1 bo:309 Volume offset:821 Expected:4 Got:1
60167 Shift 0, v:1 sv:1 min:2 cur:4
60168 Out of Range Block::1 bo:310 Volume offset:822 Expected:4 Got:1
60169 Shift 0, v:1 sv:1 min:2 cur:4
60170 Out of Range Block::1 bo:311 Volume offset:823 Expected:4 Got:1
60171 Shift 0, v:1 sv:1 min:2 cur:4
60172 Out of Range Block::1 bo:312 Volume offset:824 Expected:4 Got:1
60173 Shift 0, v:1 sv:1 min:2 cur:4
60174 Out of Range Block::1 bo:313 Volume offset:825 Expected:4 Got:1
60175 Shift 0, v:1 sv:1 min:2 cur:4
60176 Out of Range Block::1 bo:314 Volume offset:826 Expected:4 Got:1
60177 Shift 0, v:1 sv:1 min:2 cur:4
60178 Out of Range Block::1 bo:315 Volume offset:827 Expected:4 Got:1
60179 Shift 0, v:1 sv:1 min:2 cur:4
60180 Out of Range Block::1 bo:316 Volume offset:828 Expected:4 Got:1
60181 Shift 0, v:1 sv:1 min:2 cur:4
60182 Out of Range Block::1 bo:317 Volume offset:829 Expected:4 Got:1
60183 Shift 0, v:1 sv:1 min:2 cur:4
60184 Out of Range Block::1 bo:318 Volume offset:830 Expected:4 Got:1
60185 Shift 0, v:1 sv:1 min:2 cur:4
60186 Out of Range Block::1 bo:319 Volume offset:831 Expected:4 Got:1
60187 Shift 0, v:1 sv:1 min:2 cur:4
60188 Out of Range Block::1 bo:320 Volume offset:832 Expected:4 Got:1
60189 Shift 0, v:1 sv:1 min:2 cur:4
60190 Out of Range Block::1 bo:321 Volume offset:833 Expected:4 Got:1
60191 Shift 0, v:1 sv:1 min:2 cur:4
60192 Out of Range Block::1 bo:322 Volume offset:834 Expected:4 Got:1
60193 Shift 0, v:1 sv:1 min:2 cur:4
60194 Out of Range Block::1 bo:323 Volume offset:835 Expected:4 Got:1
60195 Shift 0, v:1 sv:1 min:2 cur:4
60196 Out of Range Block::1 bo:324 Volume offset:836 Expected:4 Got:1
60197 Shift 0, v:1 sv:1 min:2 cur:4
60198 Out of Range Block::1 bo:325 Volume offset:837 Expected:4 Got:1
60199 Shift 0, v:1 sv:1 min:2 cur:4
60200 Out of Range Block::1 bo:326 Volume offset:838 Expected:4 Got:1
60201 Shift 0, v:1 sv:1 min:2 cur:4
60202 Out of Range Block::1 bo:327 Volume offset:839 Expected:4 Got:1
60203 Shift 0, v:1 sv:1 min:2 cur:4
60204 Out of Range Block::1 bo:328 Volume offset:840 Expected:4 Got:1
60205 Shift 0, v:1 sv:1 min:2 cur:4
60206 Out of Range Block::1 bo:329 Volume offset:841 Expected:4 Got:1
60207 Shift 0, v:1 sv:1 min:2 cur:4
60208 Out of Range Block::1 bo:330 Volume offset:842 Expected:4 Got:1
60209 Shift 0, v:1 sv:1 min:2 cur:4
60210 Out of Range Block::1 bo:331 Volume offset:843 Expected:4 Got:1
60211 Shift 0, v:1 sv:1 min:2 cur:4
60212 Out of Range Block::1 bo:332 Volume offset:844 Expected:4 Got:1
60213 Shift 0, v:1 sv:1 min:2 cur:4
60214 Out of Range Block::1 bo:333 Volume offset:845 Expected:4 Got:1
60215 Shift 0, v:1 sv:1 min:2 cur:4
60216 Out of Range Block::1 bo:334 Volume offset:846 Expected:4 Got:1
60217 Shift 0, v:1 sv:1 min:2 cur:4
60218 Out of Range Block::1 bo:335 Volume offset:847 Expected:4 Got:1
60219 Shift 0, v:1 sv:1 min:2 cur:4
60220 Out of Range Block::1 bo:336 Volume offset:848 Expected:4 Got:1
60221 Shift 0, v:1 sv:1 min:2 cur:4
60222 Out of Range Block::1 bo:337 Volume offset:849 Expected:4 Got:1
60223 Shift 0, v:1 sv:1 min:2 cur:4
60224 Out of Range Block::1 bo:338 Volume offset:850 Expected:4 Got:1
60225 Shift 0, v:1 sv:1 min:2 cur:4
60226 Out of Range Block::1 bo:339 Volume offset:851 Expected:4 Got:1
60227 Shift 0, v:1 sv:1 min:2 cur:4
60228 Out of Range Block::1 bo:340 Volume offset:852 Expected:4 Got:1
60229 Shift 0, v:1 sv:1 min:2 cur:4
60230 Out of Range Block::1 bo:341 Volume offset:853 Expected:4 Got:1
60231 Shift 0, v:1 sv:1 min:2 cur:4
60232 Out of Range Block::1 bo:342 Volume offset:854 Expected:4 Got:1
60233 Shift 0, v:1 sv:1 min:2 cur:4
60234 Out of Range Block::1 bo:343 Volume offset:855 Expected:4 Got:1
60235 Shift 0, v:1 sv:1 min:2 cur:4
60236 Out of Range Block::1 bo:344 Volume offset:856 Expected:4 Got:1
60237 Shift 0, v:1 sv:1 min:2 cur:4
60238 Out of Range Block::1 bo:345 Volume offset:857 Expected:4 Got:1
60239 Shift 0, v:1 sv:1 min:2 cur:4
60240 Out of Range Block::1 bo:346 Volume offset:858 Expected:4 Got:1
60241 Shift 0, v:1 sv:1 min:2 cur:4
60242 Out of Range Block::1 bo:347 Volume offset:859 Expected:4 Got:1
60243 Shift 0, v:1 sv:1 min:2 cur:4
60244 Out of Range Block::1 bo:348 Volume offset:860 Expected:4 Got:1
60245 Shift 0, v:1 sv:1 min:2 cur:4
60246 Out of Range Block::1 bo:349 Volume offset:861 Expected:4 Got:1
60247 Shift 0, v:1 sv:1 min:2 cur:4
60248 Out of Range Block::1 bo:350 Volume offset:862 Expected:4 Got:1
60249 Shift 0, v:1 sv:1 min:2 cur:4
60250 Out of Range Block::1 bo:351 Volume offset:863 Expected:4 Got:1
60251 Shift 0, v:1 sv:1 min:2 cur:4
60252 Out of Range Block::1 bo:352 Volume offset:864 Expected:4 Got:1
60253 Shift 0, v:1 sv:1 min:2 cur:4
60254 Out of Range Block::1 bo:353 Volume offset:865 Expected:4 Got:1
60255 Shift 0, v:1 sv:1 min:2 cur:4
60256 Out of Range Block::1 bo:354 Volume offset:866 Expected:4 Got:1
60257 Shift 0, v:1 sv:1 min:2 cur:4
60258 Out of Range Block::1 bo:355 Volume offset:867 Expected:4 Got:1
60259 Shift 0, v:1 sv:1 min:2 cur:4
60260 Out of Range Block::1 bo:356 Volume offset:868 Expected:4 Got:1
60261 Shift 0, v:1 sv:1 min:2 cur:4
60262 Out of Range Block::1 bo:357 Volume offset:869 Expected:4 Got:1
60263 Shift 0, v:1 sv:1 min:2 cur:4
60264 Out of Range Block::1 bo:358 Volume offset:870 Expected:4 Got:1
60265 Shift 0, v:1 sv:1 min:2 cur:4
60266 Out of Range Block::1 bo:359 Volume offset:871 Expected:4 Got:1
60267 Shift 0, v:1 sv:1 min:2 cur:4
60268 test test::test_wl_commit_range_update_rollover_above ... Out of Range Block::1 bo:360 Volume offset:872 Expected:4 Got:1
60269 Shift 0, v:1 sv:1 min:2 cur:4
60270 Out of Range Block::1 bo:361 Volume offset:873 Expected:4 Got:1
60271 Shift 0, v:1 sv:1 min:2 cur:4
60272 Out of Range Block::1 bo:362 Volume offset:874 Expected:4 Got:1
60273 Shift 0, v:1 sv:1 min:2 cur:4
60274 Out of Range Block::1 bo:363 Volume offset:875 Expected:4 Got:1
60275 Shift 0, v:1 sv:1 min:2 cur:4
60276 Out of Range Block::1 bo:364 Volume offset:876 Expected:4 Got:1
60277 Shift 0, v:1 sv:1 min:2 cur:4
60278 Out of Range Block::1 bo:365 Volume offset:877 Expected:4 Got:1
60279 Shift 0, v:1 sv:1 min:2 cur:4
60280 Out of Range Block::1 bo:366 Volume offset:878 Expected:4 Got:1
60281 Shift 0, v:1 sv:1 min:2 cur:4
60282 Out of Range Block::1 bo:367 Volume offset:879 Expected:4 Got:1
60283 Shift 0, v:1 sv:1 min:2 cur:4
60284 Out of Range Block::1 bo:368 Volume offset:880 Expected:4 Got:1
60285 Shift 0, v:1 sv:1 min:2 cur:4
60286 Out of Range Block::1 bo:369 Volume offset:881 Expected:4 Got:1
60287 Shift 0, v:1 sv:1 min:2 cur:4
60288 Out of Range Block::1 bo:370 Volume offset:882 Expected:4 Got:1
60289 Shift 0, v:1 sv:1 min:2 cur:4
60290 Out of Range Block::1 bo:371 Volume offset:883 Expected:4 Got:1
60291 Shift 0, v:1 sv:1 min:2 cur:4
60292 Out of Range Block::1 bo:372 Volume offset:884 Expected:4 Got:1
60293 Shift 0, v:1 sv:1 min:2 cur:4
60294 Out of Range Block::1 bo:373 Volume offset:885 Expected:4 Got:1
60295 Shift 0, v:1 sv:1 min:2 cur:4
60296 Out of Range Block::1 bo:374 Volume offset:886 Expected:4 Got:1
60297 Shift 0, v:1 sv:1 min:2 cur:4
60298 Out of Range Block::1 bo:375 Volume offset:887 Expected:4 Got:1
60299 Shift 0, v:1 sv:1 min:2 cur:4
60300 Out of Range Block::1 bo:376 Volume offset:888 Expected:4 Got:1
60301 Shift 0, v:1 sv:1 min:2 cur:4
60302 Out of Range Block::1 bo:377 Volume offset:889 Expected:4 Got:1
60303 Shift 0, v:1 sv:1 min:2 cur:4
60304 Out of Range Block::1 bo:378 Volume offset:890 Expected:4 Got:1
60305 Shift 0, v:1 sv:1 min:2 cur:4
60306 Out of Range Block::1 bo:379 Volume offset:891 Expected:4 Got:1
60307 Shift 0, v:1 sv:1 min:2 cur:4
60308 Out of Range Block::1 bo:380 Volume offset:892 Expected:4 Got:1
60309 Shift 0, v:1 sv:1 min:2 cur:4
60310 Out of Range Block::1 bo:381 Volume offset:893 Expected:4 Got:1
60311 Shift 0, v:1 sv:1 min:2 cur:4
60312 Out of Range Block::1 bo:382 Volume offset:894 Expected:4 Got:1
60313 Shift 0, v:1 sv:1 min:2 cur:4
60314 Out of Range Block::1 bo:383 Volume offset:895 Expected:4 Got:1
60315 Shift 0, v:1 sv:1 min:2 cur:4
60316 Out of Range Block::1 bo:384 Volume offset:896 Expected:4 Got:1
60317 Shift 0, v:1 sv:1 min:2 cur:4
60318 Out of Range Block::1 bo:385 Volume offset:897 Expected:4 Got:1
60319 Shift 0, v:1 sv:1 min:2 cur:4
60320 Out of Range Block::1 bo:386 Volume offset:898 Expected:4 Got:1
60321 Shift 0, v:1 sv:1 min:2 cur:4
60322 Out of Range Block::1 bo:387 Volume offset:899 Expected:4 Got:1
60323 Shift 0, v:1 sv:1 min:2 cur:4
60324 Out of Range Block::1 bo:388 Volume offset:900 Expected:4 Got:1
60325 Shift 0, v:1 sv:1 min:2 cur:4
60326 Out of Range Block::1 bo:389 Volume offset:901 Expected:4 Got:1
60327 Shift 0, v:1 sv:1 min:2 cur:4
60328 okOut of Range Block::1 bo:390 Volume offset:902 Expected:4 Got:1
60329 Shift 0, v:1 sv:1 min:2 cur:4
60330 Out of Range Block::1 bo:391 Volume offset:903 Expected:4 Got:1
60331 Shift 0, v:1 sv:1 min:2 cur:4
60332 Out of Range Block::1 bo:392 Volume offset:904 Expected:4 Got:1
60333 Shift 0, v:1 sv:1 min:2 cur:4
60334 Out of Range Block::1 bo:393 Volume offset:905 Expected:4 Got:1
60335 Shift 0, v:1 sv:1 min:2 cur:4
60336 Out of Range Block::1 bo:394 Volume offset:906 Expected:4 Got:1
60337 Shift 0, v:1 sv:1 min:2 cur:4
60338 Out of Range Block::1 bo:395 Volume offset:907 Expected:4 Got:1
60339 Shift 0, v:1 sv:1 min:2 cur:4
60340 Out of Range Block::1 bo:396 Volume offset:908 Expected:4 Got:1
60341 Shift 0, v:1 sv:1 min:2 cur:4
60342 Out of Range Block::1 bo:397 Volume offset:909 Expected:4 Got:1
60343 Shift 0, v:1 sv:1 min:2 cur:4
60344 Out of Range Block::1 bo:398 Volume offset:910 Expected:4 Got:1
60345 Shift 0, v:1 sv:1 min:2 cur:4
60346 Out of Range Block::1 bo:399 Volume offset:911 Expected:4 Got:1
60347 Shift 0, v:1 sv:1 min:2 cur:4
60348 Out of Range Block::1 bo:400 Volume offset:912 Expected:4 Got:1
60349 Shift 0, v:1 sv:1 min:2 cur:4
60350 Out of Range Block::1 bo:401 Volume offset:913 Expected:4 Got:1
60351 Shift 0, v:1 sv:1 min:2 cur:4
60352 Out of Range Block::1 bo:402 Volume offset:914 Expected:4 Got:1
60353 Shift 0, v:1 sv:1 min:2 cur:4
60354 Out of Range Block::1 bo:403 Volume offset:915 Expected:4 Got:1
60355 Shift 0, v:1 sv:1 min:2 cur:4
60356 Out of Range Block::1 bo:404 Volume offset:916 Expected:4 Got:1
60357 Shift 0, v:1 sv:1 min:2 cur:4
60358 Out of Range Block::1 bo:405 Volume offset:917 Expected:4 Got:1
60359 Shift 0, v:1 sv:1 min:2 cur:4
60360 Out of Range Block::1 bo:406 Volume offset:918 Expected:4 Got:1
60361 Shift 0, v:1 sv:1 min:2 cur:4
60362 Out of Range Block::1 bo:407 Volume offset:919 Expected:4 Got:1
60363 Shift 0, v:1 sv:1 min:2 cur:4
60364 Out of Range Block::1 bo:408 Volume offset:920 Expected:4 Got:1
60365 Shift 0, v:1 sv:1 min:2 cur:4
60366 Out of Range Block::1 bo:409 Volume offset:921 Expected:4 Got:1
60367 Shift 0, v:1 sv:1 min:2 cur:4
60368 Out of Range Block::1 bo:410 Volume offset:922 Expected:4 Got:1
60369 Shift 0, v:1 sv:1 min:2 cur:4
60370 Out of Range Block::1 bo:411 Volume offset:923 Expected:4 Got:1
60371 Shift 0, v:1 sv:1 min:2 cur:4
60372 Out of Range Block::1 bo:412 Volume offset:924 Expected:4 Got:1
60373 Shift 0, v:1 sv:1 min:2 cur:4
60374 Out of Range Block::1 bo:413 Volume offset:925 Expected:4 Got:1
60375 Shift 0, v:1 sv:1 min:2 cur:4
60376 Out of Range Block::1 bo:414 Volume offset:926 Expected:4 Got:1
60377 Shift 0, v:1 sv:1 min:2 cur:4
60378 Out of Range Block::1 bo:415 Volume offset:927 Expected:4 Got:1
60379 Shift 0, v:1 sv:1 min:2 cur:4
60380 Out of Range Block::1 bo:416 Volume offset:928 Expected:4 Got:1
60381 Shift 0, v:1 sv:1 min:2 cur:4
60382 
60383 Out of Range Block::1 bo:417 Volume offset:929 Expected:4 Got:1
60384 Shift 0, v:1 sv:1 min:2 cur:4
60385 Out of Range Block::1 bo:418 Volume offset:930 Expected:4 Got:1
60386 Shift 0, v:1 sv:1 min:2 cur:4
60387 Out of Range Block::1 bo:419 Volume offset:931 Expected:4 Got:1
60388 Shift 0, v:1 sv:1 min:2 cur:4
60389 Out of Range Block::1 bo:420 Volume offset:932 Expected:4 Got:1
60390 Shift 0, v:1 sv:1 min:2 cur:4
60391 Out of Range Block::1 bo:421 Volume offset:933 Expected:4 Got:1
60392 Shift 0, v:1 sv:1 min:2 cur:4
60393 Out of Range Block::1 bo:422 Volume offset:934 Expected:4 Got:1
60394 Shift 0, v:1 sv:1 min:2 cur:4
60395 Out of Range Block::1 bo:423 Volume offset:935 Expected:4 Got:1
60396 Shift 0, v:1 sv:1 min:2 cur:4
60397 Out of Range Block::1 bo:424 Volume offset:936 Expected:4 Got:1
60398 Shift 0, v:1 sv:1 min:2 cur:4
60399 Out of Range Block::1 bo:425 Volume offset:937 Expected:4 Got:1
60400 Shift 0, v:1 sv:1 min:2 cur:4
60401 Out of Range Block::1 bo:426 Volume offset:938 Expected:4 Got:1
60402 Shift 0, v:1 sv:1 min:2 cur:4
60403 Out of Range Block::1 bo:427 Volume offset:939 Expected:4 Got:1
60404 Shift 0, v:1 sv:1 min:2 cur:4
60405 Out of Range Block::1 bo:428 Volume offset:940 Expected:4 Got:1
60406 Shift 0, v:1 sv:1 min:2 cur:4
60407 Out of Range Block::1 bo:429 Volume offset:941 Expected:4 Got:1
60408 Shift 0, v:1 sv:1 min:2 cur:4
60409 Out of Range Block::1 bo:430 Volume offset:942 Expected:4 Got:1
60410 Shift 0, v:1 sv:1 min:2 cur:4
60411 Out of Range Block::1 bo:431 Volume offset:943 Expected:4 Got:1
60412 Shift 0, v:1 sv:1 min:2 cur:4
60413 Out of Range Block::1 bo:432 Volume offset:944 Expected:4 Got:1
60414 Shift 0, v:1 sv:1 min:2 cur:4
60415 Out of Range Block::1 bo:433 Volume offset:945 Expected:4 Got:1
60416 Shift 0, v:1 sv:1 min:2 cur:4
60417 Out of Range Block::1 bo:434 Volume offset:946 Expected:4 Got:1
60418 Shift 0, v:1 sv:1 min:2 cur:4
60419 Out of Range Block::1 bo:435 Volume offset:947 Expected:4 Got:1
60420 Shift 0, v:1 sv:1 min:2 cur:4
60421 Out of Range Block::1 bo:436 Volume offset:948 Expected:4 Got:1
60422 Shift 0, v:1 sv:1 min:2 cur:4
60423 Out of Range Block::1 bo:437 Volume offset:949 Expected:4 Got:1
60424 Shift 0, v:1 sv:1 min:2 cur:4
60425 Out of Range Block::1 bo:438 Volume offset:950 Expected:4 Got:1
60426 Shift 0, v:1 sv:1 min:2 cur:4
60427 Out of Range Block::1 bo:439 Volume offset:951 Expected:4 Got:1
60428 Shift 0, v:1 sv:1 min:2 cur:4
60429 Out of Range Block::1 bo:440 Volume offset:952 Expected:4 Got:1
60430 Shift 0, v:1 sv:1 min:2 cur:4
60431 Out of Range Block::1 bo:441 Volume offset:953 Expected:4 Got:1
60432 Shift 0, v:1 sv:1 min:2 cur:4
60433 Out of Range Block::1 bo:442 Volume offset:954 Expected:4 Got:1
60434 Shift 0, v:1 sv:1 min:2 cur:4
60435 Out of Range Block::1 bo:443 Volume offset:955 Expected:4 Got:1
60436 Shift 0, v:1 sv:1 min:2 cur:4
60437 Out of Range Block::1 bo:444 Volume offset:956 Expected:4 Got:1
60438 Shift 0, v:1 sv:1 min:2 cur:4
60439 Out of Range Block::1 bo:445 Volume offset:957 Expected:4 Got:1
60440 Shift 0, v:1 sv:1 min:2 cur:4
60441 Out of Range Block::1 bo:446 Volume offset:958 Expected:4 Got:1
60442 Shift 0, v:1 sv:1 min:2 cur:4
60443 Out of Range Block::1 bo:447 Volume offset:959 Expected:4 Got:1
60444 Shift 0, v:1 sv:1 min:2 cur:4
60445 Out of Range Block::1 bo:448 Volume offset:960 Expected:4 Got:1
60446 Shift 0, v:1 sv:1 min:2 cur:4
60447 Out of Range Block::1 bo:449 Volume offset:961 Expected:4 Got:1
60448 Shift 0, v:1 sv:1 min:2 cur:4
60449 Out of Range Block::1 bo:450 Volume offset:962 Expected:4 Got:1
60450 Shift 0, v:1 sv:1 min:2 cur:4
60451 Out of Range Block::1 bo:451 Volume offset:963 Expected:4 Got:1
60452 Shift 0, v:1 sv:1 min:2 cur:4
60453 Out of Range Block::1 bo:452 Volume offset:964 Expected:4 Got:1
60454 Shift 0, v:1 sv:1 min:2 cur:4
60455 Out of Range Block::1 bo:453 Volume offset:965 Expected:4 Got:1
60456 Shift 0, v:1 sv:1 min:2 cur:4
60457 Out of Range Block::1 bo:454 Volume offset:966 Expected:4 Got:1
60458 Shift 0, v:1 sv:1 min:2 cur:4
60459 Out of Range Block::1 bo:455 Volume offset:967 Expected:4 Got:1
60460 Shift 0, v:1 sv:1 min:2 cur:4
60461 Out of Range Block::1 bo:456 Volume offset:968 Expected:4 Got:1
60462 Shift 0, v:1 sv:1 min:2 cur:4
60463 Out of Range Block::1 bo:457 Volume offset:969 Expected:4 Got:1
60464 Shift 0, v:1 sv:1 min:2 cur:4
60465 Out of Range Block::1 bo:458 Volume offset:970 Expected:4 Got:1
60466 Shift 0, v:1 sv:1 min:2 cur:4
60467 Out of Range Block::1 bo:459 Volume offset:971 Expected:4 Got:1
60468 Shift 0, v:1 sv:1 min:2 cur:4
60469 Out of Range Block::1 bo:460 Volume offset:972 Expected:4 Got:1
60470 Shift 0, v:1 sv:1 min:2 cur:4
60471 Out of Range Block::1 bo:461 Volume offset:973 Expected:4 Got:1
60472 Shift 0, v:1 sv:1 min:2 cur:4
60473 Out of Range Block::1 bo:462 Volume offset:974 Expected:4 Got:1
60474 Shift 0, v:1 sv:1 min:2 cur:4
60475 Out of Range Block::1 bo:463 Volume offset:975 Expected:4 Got:1
60476 Shift 0, v:1 sv:1 min:2 cur:4
60477 Out of Range Block::1 bo:464 Volume offset:976 Expected:4 Got:1
60478 Shift 0, v:1 sv:1 min:2 cur:4
60479 Out of Range Block::1 bo:465 Volume offset:977 Expected:4 Got:1
60480 Shift 0, v:1 sv:1 min:2 cur:4
60481 Out of Range Block::1 bo:466 Volume offset:978 Expected:4 Got:1
60482 test test::test_read_compare_large ... Shift 0, v:1 sv:1 min:2 cur:4
60483 Out of Range Block::1 bo:467 Volume offset:979 Expected:4 Got:1
60484 Shift 0, v:1 sv:1 min:2 cur:4
60485 Out of Range Block::1 bo:468 Volume offset:980 Expected:4 Got:1
60486 Shift 0, v:1 sv:1 min:2 cur:4
60487 Out of Range Block::1 bo:469 Volume offset:981 Expected:4 Got:1
60488 Shift 0, v:1 sv:1 min:2 cur:4
60489 Out of Range Block::1 bo:470 Volume offset:982 Expected:4 Got:1
60490 Shift 0, v:1 sv:1 min:2 cur:4
60491 Out of Range Block::1 bo:471 Volume offset:983 Expected:4 Got:1
60492 Shift 0, v:1 sv:1 min:2 cur:4
60493 Out of Range Block::1 bo:472 Volume offset:984 Expected:4 Got:1
60494 Shift 0, v:1 sv:1 min:2 cur:4
60495 Out of Range Block::1 bo:473 Volume offset:985 Expected:4 Got:1
60496 Shift 0, v:1 sv:1 min:2 cur:4
60497 Out of Range Block::1 bo:474 Volume offset:986 Expected:4 Got:1
60498 Shift 0, v:1 sv:1 min:2 cur:4
60499 Out of Range Block::1 bo:475 Volume offset:987 Expected:4 Got:1
60500 Shift 0, v:1 sv:1 min:2 cur:4
60501 Out of Range Block::1 bo:476 Volume offset:988 Expected:4 Got:1
60502 Shift 0, v:1 sv:1 min:2 cur:4
60503 Out of Range Block::1 bo:477 Volume offset:989 Expected:4 Got:1
60504 Shift 0, v:1 sv:1 min:2 cur:4
60505 Out of Range Block::1 bo:478 Volume offset:990 Expected:4 Got:1
60506 Shift 0, v:1 sv:1 min:2 cur:4
60507 Out of Range Block::1 bo:479 Volume offset:991 Expected:4 Got:1
60508 Shift 0, v:1 sv:1 min:2 cur:4
60509 Out of Range Block::1 bo:480 Volume offset:992 Expected:4 Got:1
60510 Shift 0, v:1 sv:1 min:2 cur:4
60511 Out of Range Block::1 bo:481 Volume offset:993 Expected:4 Got:1
60512 Shift 0, v:1 sv:1 min:2 cur:4
60513 Out of Range Block::1 bo:482 Volume offset:994 Expected:4 Got:1
60514 Shift 0, v:1 sv:1 min:2 cur:4
60515 Out of Range Block::1 bo:483 Volume offset:995 Expected:4 Got:1
60516 Shift 0, v:1 sv:1 min:2 cur:4
60517 Out of Range Block::1 bo:484 Volume offset:996 Expected:4 Got:1
60518 Shift 0, v:1 sv:1 min:2 cur:4
60519 Out of Range Block::1 bo:485 Volume offset:997 Expected:4 Got:1
60520 Shift 0, v:1 sv:1 min:2 cur:4
60521 Out of Range Block::1 bo:486 Volume offset:998 Expected:4 Got:1
60522 Shift 0, v:1 sv:1 min:2 cur:4
60523 Out of Range Block::1 bo:487 Volume offset:999 Expected:4 Got:1
60524 Shift 0, v:1 sv:1 min:2 cur:4
60525 Out of Range Block::1 bo:488 Volume offset:1000 Expected:4 Got:1
60526 Shift 0, v:1 sv:1 min:2 cur:4
60527 Out of Range Block::1 bo:489 Volume offset:1001 Expected:4 Got:1
60528 Shift 0, v:1 sv:1 min:2 cur:4
60529 Out of Range Block::1 bo:490 Volume offset:1002 Expected:4 Got:1
60530 Shift 0, v:1 sv:1 min:2 cur:4
60531 Out of Range Block::1 bo:491 Volume offset:1003 Expected:4 Got:1
60532 Shift 0, v:1 sv:1 min:2 cur:4
60533 Out of Range Block::1 bo:492 Volume offset:1004 Expected:4 Got:1
60534 Shift 0, v:1 sv:1 min:2 cur:4
60535 Out of Range Block::1 bo:493 Volume offset:1005 Expected:4 Got:1
60536 Shift 0, v:1 sv:1 min:2 cur:4
60537 Out of Range Block::1 bo:494 Volume offset:1006 Expected:4 Got:1
60538 Shift 0, v:1 sv:1 min:2 cur:4
60539 Out of Range Block::1 bo:495 Volume offset:1007 Expected:4 Got:1
60540 Shift 0, v:1 sv:1 min:2 cur:4
60541 Out of Range Block::1 bo:496 Volume offset:1008 Expected:4 Got:1
60542 Shift 0, v:1 sv:1 min:2 cur:4
60543 Out of Range Block::1 bo:497 Volume offset:1009 Expected:4 Got:1
60544 Shift 0, v:1 sv:1 min:2 cur:4
60545 Out of Range Block::1 bo:498 Volume offset:1010 Expected:4 Got:1
60546 Shift 0, v:1 sv:1 min:2 cur:4
60547 Out of Range Block::1 bo:499 Volume offset:1011 Expected:4 Got:1
60548 Shift 0, v:1 sv:1 min:2 cur:4
60549 Out of Range Block::1 bo:500 Volume offset:1012 Expected:4 Got:1
60550 Shift 0, v:1 sv:1 min:2 cur:4
60551 Out of Range Block::1 bo:501 Volume offset:1013 Expected:4 Got:1
60552 Shift 0, v:1 sv:1 min:2 cur:4
60553 Out of Range Block::1 bo:502 Volume offset:1014 Expected:4 Got:1
60554 Shift 0, v:1 sv:1 min:2 cur:4
60555 Out of Range Block::1 bo:503 Volume offset:1015 Expected:4 Got:1
60556 Shift 0, v:1 sv:1 min:2 cur:4
60557 Out of Range Block::1 bo:504 Volume offset:1016 Expected:4 Got:1
60558 Shift 0, v:1 sv:1 min:2 cur:4
60559 Out of Range Block::1 bo:505 Volume offset:1017 Expected:4 Got:1
60560 Shift 0, v:1 sv:1 min:2 cur:4
60561 okOut of Range Block::1 bo:506 Volume offset:1018 Expected:4 Got:1
60562 Shift 0, v:1 sv:1 min:2 cur:4
60563 Out of Range Block::1 bo:507 Volume offset:1019 Expected:4 Got:1
60564 Shift 0, v:1 sv:1 min:2 cur:4
60565 Out of Range Block::1 bo:508 Volume offset:1020 Expected:4 Got:1
60566 Shift 0, v:1 sv:1 min:2 cur:4
60567 Out of Range Block::1 bo:509 Volume offset:1021 Expected:4 Got:1
60568 Shift 0, v:1 sv:1 min:2 cur:4
60569 Out of Range Block::1 bo:510 Volume offset:1022 Expected:4 Got:1
60570 Shift 0, v:1 sv:1 min:2 cur:4
60571 Out of Range Block::1 bo:511 Volume offset:1023 Expected:4 Got:1
60572 
60573 Shift 0, v:2 sv:2 min:2 cur:4
60574 Update block 1 to 2 (min:2 max:4 res:true)
60575 In Range Block::1 bo:1 Volume offset:513 Expected:4 Got:2
60576 Shift 0, v:2 sv:2 min:2 cur:2
60577 test test::test_wl_empty ... In Range Block::1 bo:2 Volume offset:514 Expected:4 Got:2
60578 Shift 0, v:2 sv:2 min:2 cur:2
60579 In Range Block::1 bo:3 Volume offset:515 Expected:4 Got:2
60580 Shift 0, v:2 sv:2 min:2 cur:2
60581 In Range Block::1 bo:4 Volume offset:516 Expected:4 Got:2
60582 okShift 0, v:2 sv:2 min:2 cur:2
60583 In Range Block::1 bo:5 Volume offset:517 Expected:4 Got:2
60584 Shift 0, v:2 sv:2 min:2 cur:2
60585 In Range Block::1 bo:6 Volume offset:518 Expected:4 Got:2
60586 Shift 0, v:2 sv:2 min:2 cur:2
60587 In Range Block::1 bo:7 Volume offset:519 Expected:4 Got:2
60588 Shift 0, v:2 sv:2 min:2 cur:2
60589 In Range Block::1 bo:8 Volume offset:520 Expected:4 Got:2
60590 Shift 0, v:2 sv:2 min:2 cur:2
60591 In Range Block::1 bo:9 Volume offset:521 Expected:4 Got:2
60592 Shift 0, v:2 sv:2 min:2 cur:2
60593 In Range Block::1 bo:10 Volume offset:522 Expected:4 Got:2
60594 Shift 0, v:2 sv:2 min:2 cur:2
60595 In Range Block::1 bo:11 Volume offset:523 Expected:4 Got:2
60596 Shift 0, v:2 sv:2 min:2 cur:2
60597 In Range Block::1 bo:12 Volume offset:524 Expected:4 Got:2
60598 Shift 0, v:2 sv:2 min:2 cur:2
60599 In Range Block::1 bo:13 Volume offset:525 Expected:4 Got:2
60600 Shift 0, v:2 sv:2 min:2 cur:2
60601 In Range Block::1 bo:14 Volume offset:526 Expected:4 Got:2
60602 Shift 0, v:2 sv:2 min:2 cur:2
60603 In Range Block::1 bo:15 Volume offset:527 Expected:4 Got:2
60604 Shift 0, v:2 sv:2 min:2 cur:2
60605 In Range Block::1 bo:16 Volume offset:528 Expected:4 Got:2
60606 Shift 0, v:2 sv:2 min:2 cur:2
60607 In Range Block::1 bo:17 Volume offset:529 Expected:4 Got:2
60608 Shift 0, v:2 sv:2 min:2 cur:2
60609 In Range Block::1 bo:18 Volume offset:530 Expected:4 Got:2
60610 Shift 0, v:2 sv:2 min:2 cur:2
60611 In Range Block::1 bo:19 Volume offset:531 Expected:4 Got:2
60612 Shift 0, v:2 sv:2 min:2 cur:2
60613 In Range Block::1 bo:20 Volume offset:532 Expected:4 Got:2
60614 Shift 0, v:2 sv:2 min:2 cur:2
60615 In Range Block::1 bo:21 Volume offset:533 Expected:4 Got:2
60616 Shift 0, v:2 sv:2 min:2 cur:2
60617 In Range Block::1 bo:22 Volume offset:534 Expected:4 Got:2
60618 Shift 0, v:2 sv:2 min:2 cur:2
60619 In Range Block::1 bo:23 Volume offset:535 Expected:4 Got:2
60620 Shift 0, v:2 sv:2 min:2 cur:2
60621 In Range Block::1 bo:24 Volume offset:536 Expected:4 Got:2
60622 Shift 0, v:2 sv:2 min:2 cur:2
60623 In Range Block::1 bo:25 Volume offset:537 Expected:4 Got:2
60624 Shift 0, v:2 sv:2 min:2 cur:2
60625 In Range Block::1 bo:26 Volume offset:538 Expected:4 Got:2
60626 Shift 0, v:2 sv:2 min:2 cur:2
60627 In Range Block::1 bo:27 Volume offset:539 Expected:4 Got:2
60628 Shift 0, v:2 sv:2 min:2 cur:2
60629 In Range Block::1 bo:28 Volume offset:540 Expected:4 Got:2
60630 Shift 0, v:2 sv:2 min:2 cur:2
60631 In Range Block::1 bo:29 Volume offset:541 Expected:4 Got:2
60632 Shift 0, v:2 sv:2 min:2 cur:2
60633 In Range Block::1 bo:30 Volume offset:542 Expected:4 Got:2
60634 Shift 0, v:2 sv:2 min:2 cur:2
60635 In Range Block::1 bo:31 Volume offset:543 Expected:4 Got:2
60636 Shift 0, v:2 sv:2 min:2 cur:2
60637 In Range Block::1 bo:32 Volume offset:544 Expected:4 Got:2
60638 Shift 0, v:2 sv:2 min:2 cur:2
60639 In Range Block::1 bo:33 Volume offset:545 Expected:4 Got:2
60640 Shift 0, v:2 sv:2 min:2 cur:2
60641 In Range Block::1 bo:34 Volume offset:546 Expected:4 Got:2
60642 Shift 0, v:2 sv:2 min:2 cur:2
60643 In Range Block::1 bo:35 Volume offset:547 Expected:4 Got:2
60644 Shift 0, v:2 sv:2 min:2 cur:2
60645 In Range Block::1 bo:36 Volume offset:548 Expected:4 Got:2
60646 
60647 Shift 0, v:2 sv:2 min:2 cur:2
60648 In Range Block::1 bo:37 Volume offset:549 Expected:4 Got:2
60649 Shift 0, v:2 sv:2 min:2 cur:2
60650 In Range Block::1 bo:38 Volume offset:550 Expected:4 Got:2
60651 Shift 0, v:2 sv:2 min:2 cur:2
60652 In Range Block::1 bo:39 Volume offset:551 Expected:4 Got:2
60653 Shift 0, v:2 sv:2 min:2 cur:2
60654 In Range Block::1 bo:40 Volume offset:552 Expected:4 Got:2
60655 Shift 0, v:2 sv:2 min:2 cur:2
60656 In Range Block::1 bo:41 Volume offset:553 Expected:4 Got:2
60657 Shift 0, v:2 sv:2 min:2 cur:2
60658 In Range Block::1 bo:42 Volume offset:554 Expected:4 Got:2
60659 Shift 0, v:2 sv:2 min:2 cur:2
60660 In Range Block::1 bo:43 Volume offset:555 Expected:4 Got:2
60661 Shift 0, v:2 sv:2 min:2 cur:2
60662 In Range Block::1 bo:44 Volume offset:556 Expected:4 Got:2
60663 Shift 0, v:2 sv:2 min:2 cur:2
60664 In Range Block::1 bo:45 Volume offset:557 Expected:4 Got:2
60665 Shift 0, v:2 sv:2 min:2 cur:2
60666 In Range Block::1 bo:46 Volume offset:558 Expected:4 Got:2
60667 Shift 0, v:2 sv:2 min:2 cur:2
60668 In Range Block::1 bo:47 Volume offset:559 Expected:4 Got:2
60669 Shift 0, v:2 sv:2 min:2 cur:2
60670 In Range Block::1 bo:48 Volume offset:560 Expected:4 Got:2
60671 Shift 0, v:2 sv:2 min:2 cur:2
60672 In Range Block::1 bo:49 Volume offset:561 Expected:4 Got:2
60673 Shift 0, v:2 sv:2 min:2 cur:2
60674 In Range Block::1 bo:50 Volume offset:562 Expected:4 Got:2
60675 Shift 0, v:2 sv:2 min:2 cur:2
60676 In Range Block::1 bo:51 Volume offset:563 Expected:4 Got:2
60677 Shift 0, v:2 sv:2 min:2 cur:2
60678 In Range Block::1 bo:52 Volume offset:564 Expected:4 Got:2
60679 Shift 0, v:2 sv:2 min:2 cur:2
60680 In Range Block::1 bo:53 Volume offset:565 Expected:4 Got:2
60681 Shift 0, v:2 sv:2 min:2 cur:2
60682 test test::test_wl_is_zero ... In Range Block::1 bo:54 Volume offset:566 Expected:4 Got:2
60683 Shift 0, v:2 sv:2 min:2 cur:2
60684 In Range Block::1 bo:55 Volume offset:567 Expected:4 Got:2
60685 Shift 0, v:2 sv:2 min:2 cur:2
60686 In Range Block::1 bo:56 Volume offset:568 Expected:4 Got:2
60687 Shift 0, v:2 sv:2 min:2 cur:2
60688 okIn Range Block::1 bo:57 Volume offset:569 Expected:4 Got:2
60689 Shift 0, v:2 sv:2 min:2 cur:2
60690 In Range Block::1 bo:58 Volume offset:570 Expected:4 Got:2
60691 Shift 0, v:2 sv:2 min:2 cur:2
60692 In Range Block::1 bo:59 Volume offset:571 Expected:4 Got:2
60693 Shift 0, v:2 sv:2 min:2 cur:2
60694 In Range Block::1 bo:60 Volume offset:572 Expected:4 Got:2
60695 Shift 0, v:2 sv:2 min:2 cur:2
60696 In Range Block::1 bo:61 Volume offset:573 Expected:4 Got:2
60697 Shift 0, v:2 sv:2 min:2 cur:2
60698 In Range Block::1 bo:62 Volume offset:574 Expected:4 Got:2
60699 Shift 0, v:2 sv:2 min:2 cur:2
60700 In Range Block::1 bo:63 Volume offset:575 Expected:4 Got:2
60701 Shift 0, v:2 sv:2 min:2 cur:2
60702 In Range Block::1 bo:64 Volume offset:576 Expected:4 Got:2
60703 Shift 0, v:2 sv:2 min:2 cur:2
60704 In Range Block::1 bo:65 Volume offset:577 Expected:4 Got:2
60705 Shift 0, v:2 sv:2 min:2 cur:2
60706 In Range Block::1 bo:66 Volume offset:578 Expected:4 Got:2
60707 Shift 0, v:2 sv:2 min:2 cur:2
60708 In Range Block::1 bo:67 Volume offset:579 Expected:4 Got:2
60709 Shift 0, v:2 sv:2 min:2 cur:2
60710 In Range Block::1 bo:68 Volume offset:580 Expected:4 Got:2
60711 Shift 0, v:2 sv:2 min:2 cur:2
60712 In Range Block::1 bo:69 Volume offset:581 Expected:4 Got:2
60713 Shift 0, v:2 sv:2 min:2 cur:2
60714 In Range Block::1 bo:70 Volume offset:582 Expected:4 Got:2
60715 Shift 0, v:2 sv:2 min:2 cur:2
60716 In Range Block::1 bo:71 Volume offset:583 Expected:4 Got:2
60717 Shift 0, v:2 sv:2 min:2 cur:2
60718 In Range Block::1 bo:72 Volume offset:584 Expected:4 Got:2
60719 Shift 0, v:2 sv:2 min:2 cur:2
60720 In Range Block::1 bo:73 Volume offset:585 Expected:4 Got:2
60721 Shift 0, v:2 sv:2 min:2 cur:2
60722 In Range Block::1 bo:74 Volume offset:586 Expected:4 Got:2
60723 Shift 0, v:2 sv:2 min:2 cur:2
60724 In Range Block::1 bo:75 Volume offset:587 Expected:4 Got:2
60725 Shift 0, v:2 sv:2 min:2 cur:2
60726 In Range Block::1 bo:76 Volume offset:588 Expected:4 Got:2
60727 Shift 0, v:2 sv:2 min:2 cur:2
60728 In Range Block::1 bo:77 Volume offset:589 Expected:4 Got:2
60729 Shift 0, v:2 sv:2 min:2 cur:2
60730 In Range Block::1 bo:78 Volume offset:590 Expected:4 Got:2
60731 Shift 0, v:2 sv:2 min:2 cur:2
60732 In Range Block::1 bo:79 Volume offset:591 Expected:4 Got:2
60733 Shift 0, v:2 sv:2 min:2 cur:2
60734 In Range Block::1 bo:80 Volume offset:592 Expected:4 Got:2
60735 Shift 0, v:2 sv:2 min:2 cur:2
60736 In Range Block::1 bo:81 Volume offset:593 Expected:4 Got:2
60737 Shift 0, v:2 sv:2 min:2 cur:2
60738 In Range Block::1 bo:82 Volume offset:594 Expected:4 Got:2
60739 Shift 0, v:2 sv:2 min:2 cur:2
60740 In Range Block::1 bo:83 Volume offset:595 Expected:4 Got:2
60741 Shift 0, v:2 sv:2 min:2 cur:2
60742 In Range Block::1 bo:84 Volume offset:596 Expected:4 Got:2
60743 Shift 0, v:2 sv:2 min:2 cur:2
60744 In Range Block::1 bo:85 Volume offset:597 Expected:4 Got:2
60745 Shift 0, v:2 sv:2 min:2 cur:2
60746 In Range Block::1 bo:86 Volume offset:598 Expected:4 Got:2
60747 Shift 0, v:2 sv:2 min:2 cur:2
60748 In Range Block::1 bo:87 Volume offset:599 Expected:4 Got:2
60749 Shift 0, v:2 sv:2 min:2 cur:2
60750 In Range Block::1 bo:88 Volume offset:600 Expected:4 Got:2
60751 Shift 0, v:2 sv:2 min:2 cur:2
60752 In Range Block::1 bo:89 Volume offset:601 Expected:4 Got:2
60753 Shift 0, v:2 sv:2 min:2 cur:2
60754 In Range Block::1 bo:90 Volume offset:602 Expected:4 Got:2
60755 Shift 0, v:2 sv:2 min:2 cur:2
60756 In Range Block::1 bo:91 Volume offset:603 Expected:4 Got:2
60757 Shift 0, v:2 sv:2 min:2 cur:2
60758 In Range Block::1 bo:92 Volume offset:604 Expected:4 Got:2
60759 Shift 0, v:2 sv:2 min:2 cur:2
60760 In Range Block::1 bo:93 Volume offset:605 Expected:4 Got:2
60761 Shift 0, v:2 sv:2 min:2 cur:2
60762 In Range Block::1 bo:94 Volume offset:606 Expected:4 Got:2
60763 Shift 0, v:2 sv:2 min:2 cur:2
60764 In Range Block::1 bo:95 Volume offset:607 Expected:4 Got:2
60765 Shift 0, v:2 sv:2 min:2 cur:2
60766 In Range Block::1 bo:96 Volume offset:608 Expected:4 Got:2
60767 Shift 0, v:2 sv:2 min:2 cur:2
60768 In Range Block::1 bo:97 Volume offset:609 Expected:4 Got:2
60769 Shift 0, v:2 sv:2 min:2 cur:2
60770 
60771 In Range Block::1 bo:98 Volume offset:610 Expected:4 Got:2
60772 Shift 0, v:2 sv:2 min:2 cur:2
60773 In Range Block::1 bo:99 Volume offset:611 Expected:4 Got:2
60774 Shift 0, v:2 sv:2 min:2 cur:2
60775 In Range Block::1 bo:100 Volume offset:612 Expected:4 Got:2
60776 Shift 0, v:2 sv:2 min:2 cur:2
60777 In Range Block::1 bo:101 Volume offset:613 Expected:4 Got:2
60778 Shift 0, v:2 sv:2 min:2 cur:2
60779 In Range Block::1 bo:102 Volume offset:614 Expected:4 Got:2
60780 Shift 0, v:2 sv:2 min:2 cur:2
60781 In Range Block::1 bo:103 Volume offset:615 Expected:4 Got:2
60782 Shift 0, v:2 sv:2 min:2 cur:2
60783 In Range Block::1 bo:104 Volume offset:616 Expected:4 Got:2
60784 Shift 0, v:2 sv:2 min:2 cur:2
60785 In Range Block::1 bo:105 Volume offset:617 Expected:4 Got:2
60786 Shift 0, v:2 sv:2 min:2 cur:2
60787 In Range Block::1 bo:106 Volume offset:618 Expected:4 Got:2
60788 Shift 0, v:2 sv:2 min:2 cur:2
60789 test test::test_wl_set ... In Range Block::1 bo:107 Volume offset:619 Expected:4 Got:2
60790 Shift 0, v:2 sv:2 min:2 cur:2
60791 In Range Block::1 bo:108 Volume offset:620 Expected:4 Got:2
60792 Shift 0, v:2 sv:2 min:2 cur:2
60793 In Range Block::1 bo:109 Volume offset:621 Expected:4 Got:2
60794 Shift 0, v:2 sv:2 min:2 cur:2
60795 In Range Block::1 bo:110 Volume offset:622 Expected:4 Got:2
60796 Shift 0, v:2 sv:2 min:2 cur:2
60797 In Range Block::1 bo:111 Volume offset:623 Expected:4 Got:2
60798 Shift 0, v:2 sv:2 min:2 cur:2
60799 In Range Block::1 bo:112 Volume offset:624 Expected:4 Got:2
60800 Shift 0, v:2 sv:2 min:2 cur:2
60801 In Range Block::1 bo:113 Volume offset:625 Expected:4 Got:2
60802 Shift 0, v:2 sv:2 min:2 cur:2
60803 In Range Block::1 bo:114 Volume offset:626 Expected:4 Got:2
60804 Shift 0, v:2 sv:2 min:2 cur:2
60805 In Range Block::1 bo:115 Volume offset:627 Expected:4 Got:2
60806 Shift 0, v:2 sv:2 min:2 cur:2
60807 In Range Block::1 bo:116 Volume offset:628 Expected:4 Got:2
60808 Shift 0, v:2 sv:2 min:2 cur:2
60809 In Range Block::1 bo:117 Volume offset:629 Expected:4 Got:2
60810 Shift 0, v:2 sv:2 min:2 cur:2
60811 In Range Block::1 bo:118 Volume offset:630 Expected:4 Got:2
60812 okShift 0, v:2 sv:2 min:2 cur:2
60813 In Range Block::1 bo:119 Volume offset:631 Expected:4 Got:2
60814 Shift 0, v:2 sv:2 min:2 cur:2
60815 In Range Block::1 bo:120 Volume offset:632 Expected:4 Got:2
60816 Shift 0, v:2 sv:2 min:2 cur:2
60817 In Range Block::1 bo:121 Volume offset:633 Expected:4 Got:2
60818 Shift 0, v:2 sv:2 min:2 cur:2
60819 In Range Block::1 bo:122 Volume offset:634 Expected:4 Got:2
60820 Shift 0, v:2 sv:2 min:2 cur:2
60821 In Range Block::1 bo:123 Volume offset:635 Expected:4 Got:2
60822 Shift 0, v:2 sv:2 min:2 cur:2
60823 In Range Block::1 bo:124 Volume offset:636 Expected:4 Got:2
60824 Shift 0, v:2 sv:2 min:2 cur:2
60825 In Range Block::1 bo:125 Volume offset:637 Expected:4 Got:2
60826 Shift 0, v:2 sv:2 min:2 cur:2
60827 In Range Block::1 bo:126 Volume offset:638 Expected:4 Got:2
60828 Shift 0, v:2 sv:2 min:2 cur:2
60829 In Range Block::1 bo:127 Volume offset:639 Expected:4 Got:2
60830 Shift 0, v:2 sv:2 min:2 cur:2
60831 In Range Block::1 bo:128 Volume offset:640 Expected:4 Got:2
60832 Shift 0, v:2 sv:2 min:2 cur:2
60833 In Range Block::1 bo:129 Volume offset:641 Expected:4 Got:2
60834 Shift 0, v:2 sv:2 min:2 cur:2
60835 In Range Block::1 bo:130 Volume offset:642 Expected:4 Got:2
60836 Shift 0, v:2 sv:2 min:2 cur:2
60837 In Range Block::1 bo:131 Volume offset:643 Expected:4 Got:2
60838 Shift 0, v:2 sv:2 min:2 cur:2
60839 In Range Block::1 bo:132 Volume offset:644 Expected:4 Got:2
60840 Shift 0, v:2 sv:2 min:2 cur:2
60841 In Range Block::1 bo:133 Volume offset:645 Expected:4 Got:2
60842 Shift 0, v:2 sv:2 min:2 cur:2
60843 In Range Block::1 bo:134 Volume offset:646 Expected:4 Got:2
60844 Shift 0, v:2 sv:2 min:2 cur:2
60845 In Range Block::1 bo:135 Volume offset:647 Expected:4 Got:2
60846 Shift 0, v:2 sv:2 min:2 cur:2
60847 In Range Block::1 bo:136 Volume offset:648 Expected:4 Got:2
60848 Shift 0, v:2 sv:2 min:2 cur:2
60849 In Range Block::1 bo:137 Volume offset:649 Expected:4 Got:2
60850 Shift 0, v:2 sv:2 min:2 cur:2
60851 In Range Block::1 bo:138 Volume offset:650 Expected:4 Got:2
60852 Shift 0, v:2 sv:2 min:2 cur:2
60853 In Range Block::1 bo:139 Volume offset:651 Expected:4 Got:2
60854 Shift 0, v:2 sv:2 min:2 cur:2
60855 In Range Block::1 bo:140 Volume offset:652 Expected:4 Got:2
60856 Shift 0, v:2 sv:2 min:2 cur:2
60857 In Range Block::1 bo:141 Volume offset:653 Expected:4 Got:2
60858 Shift 0, v:2 sv:2 min:2 cur:2
60859 In Range Block::1 bo:142 Volume offset:654 Expected:4 Got:2
60860 Shift 0, v:2 sv:2 min:2 cur:2
60861 In Range Block::1 bo:143 Volume offset:655 Expected:4 Got:2
60862 Shift 0, v:2 sv:2 min:2 cur:2
60863 In Range Block::1 bo:144 Volume offset:656 Expected:4 Got:2
60864 Shift 0, v:2 sv:2 min:2 cur:2
60865 In Range Block::1 bo:145 Volume offset:657 Expected:4 Got:2
60866 Shift 0, v:2 sv:2 min:2 cur:2
60867 In Range Block::1 bo:146 Volume offset:658 Expected:4 Got:2
60868 Shift 0, v:2 sv:2 min:2 cur:2
60869 In Range Block::1 bo:147 Volume offset:659 Expected:4 Got:2
60870 Shift 0, v:2 sv:2 min:2 cur:2
60871 In Range Block::1 bo:148 Volume offset:660 Expected:4 Got:2
60872 Shift 0, v:2 sv:2 min:2 cur:2
60873 In Range Block::1 bo:149 Volume offset:661 Expected:4 Got:2
60874 Shift 0, v:2 sv:2 min:2 cur:2
60875 In Range Block::1 bo:150 Volume offset:662 Expected:4 Got:2
60876 Shift 0, v:2 sv:2 min:2 cur:2
60877 In Range Block::1 bo:151 Volume offset:663 Expected:4 Got:2
60878 Shift 0, v:2 sv:2 min:2 cur:2
60879 In Range Block::1 bo:152 Volume offset:664 Expected:4 Got:2
60880 Shift 0, v:2 sv:2 min:2 cur:2
60881 In Range Block::1 bo:153 Volume offset:665 Expected:4 Got:2
60882 Shift 0, v:2 sv:2 min:2 cur:2
60883 In Range Block::1 bo:154 Volume offset:666 Expected:4 Got:2
60884 Shift 0, v:2 sv:2 min:2 cur:2
60885 In Range Block::1 bo:155 Volume offset:667 Expected:4 Got:2
60886 Shift 0, v:2 sv:2 min:2 cur:2
60887 In Range Block::1 bo:156 Volume offset:668 Expected:4 Got:2
60888 Shift 0, v:2 sv:2 min:2 cur:2
60889 In Range Block::1 bo:157 Volume offset:669 Expected:4 Got:2
60890 Shift 0, v:2 sv:2 min:2 cur:2
60891 In Range Block::1 bo:158 Volume offset:670 Expected:4 Got:2
60892 Shift 0, v:2 sv:2 min:2 cur:2
60893 In Range Block::1 bo:159 Volume offset:671 Expected:4 Got:2
60894 Shift 0, v:2 sv:2 min:2 cur:2
60895 In Range Block::1 bo:160 Volume offset:672 Expected:4 Got:2
60896 Shift 0, v:2 sv:2 min:2 cur:2
60897 In Range Block::1 bo:161 Volume offset:673 Expected:4 Got:2
60898 Shift 0, v:2 sv:2 min:2 cur:2
60899 In Range Block::1 bo:162 Volume offset:674 Expected:4 Got:2
60900 Shift 0, v:2 sv:2 min:2 cur:2
60901 In Range Block::1 bo:163 Volume offset:675 Expected:4 Got:2
60902 Shift 0, v:2 sv:2 min:2 cur:2
60903 In Range Block::1 bo:164 Volume offset:676 Expected:4 Got:2
60904 
60905 Shift 0, v:2 sv:2 min:2 cur:2
60906 In Range Block::1 bo:165 Volume offset:677 Expected:4 Got:2
60907 Shift 0, v:2 sv:2 min:2 cur:2
60908 In Range Block::1 bo:166 Volume offset:678 Expected:4 Got:2
60909 Shift 0, v:2 sv:2 min:2 cur:2
60910 In Range Block::1 bo:167 Volume offset:679 Expected:4 Got:2
60911 Shift 0, v:2 sv:2 min:2 cur:2
60912 In Range Block::1 bo:168 Volume offset:680 Expected:4 Got:2
60913 Shift 0, v:2 sv:2 min:2 cur:2
60914 In Range Block::1 bo:169 Volume offset:681 Expected:4 Got:2
60915 Shift 0, v:2 sv:2 min:2 cur:2
60916 In Range Block::1 bo:170 Volume offset:682 Expected:4 Got:2
60917 Shift 0, v:2 sv:2 min:2 cur:2
60918 In Range Block::1 bo:171 Volume offset:683 Expected:4 Got:2
60919 Shift 0, v:2 sv:2 min:2 cur:2
60920 In Range Block::1 bo:172 Volume offset:684 Expected:4 Got:2
60921 Shift 0, v:2 sv:2 min:2 cur:2
60922 In Range Block::1 bo:173 Volume offset:685 Expected:4 Got:2
60923 test test::test_wl_update ... Shift 0, v:2 sv:2 min:2 cur:2
60924 In Range Block::1 bo:174 Volume offset:686 Expected:4 Got:2
60925 Shift 0, v:2 sv:2 min:2 cur:2
60926 In Range Block::1 bo:175 Volume offset:687 Expected:4 Got:2
60927 Shift 0, v:2 sv:2 min:2 cur:2
60928 In Range Block::1 bo:176 Volume offset:688 Expected:4 Got:2
60929 Shift 0, v:2 sv:2 min:2 cur:2
60930 In Range Block::1 bo:177 Volume offset:689 Expected:4 Got:2
60931 Shift 0, v:2 sv:2 min:2 cur:2
60932 In Range Block::1 bo:178 Volume offset:690 Expected:4 Got:2
60933 Shift 0, v:2 sv:2 min:2 cur:2
60934 In Range Block::1 bo:179 Volume offset:691 Expected:4 Got:2
60935 Shift 0, v:2 sv:2 min:2 cur:2
60936 In Range Block::1 bo:180 Volume offset:692 Expected:4 Got:2
60937 Shift 0, v:2 sv:2 min:2 cur:2
60938 In Range Block::1 bo:181 Volume offset:693 Expected:4 Got:2
60939 Shift 0, v:2 sv:2 min:2 cur:2
60940 In Range Block::1 bo:182 Volume offset:694 Expected:4 Got:2
60941 Shift 0, v:2 sv:2 min:2 cur:2
60942 In Range Block::1 bo:183 Volume offset:695 Expected:4 Got:2
60943 Shift 0, v:2 sv:2 min:2 cur:2
60944 In Range Block::1 bo:184 Volume offset:696 Expected:4 Got:2
60945 Shift 0, v:2 sv:2 min:2 cur:2
60946 In Range Block::1 bo:185 Volume offset:697 Expected:4 Got:2
60947 Shift 0, v:2 sv:2 min:2 cur:2
60948 In Range Block::1 bo:186 Volume offset:698 Expected:4 Got:2
60949 Shift 0, v:2 sv:2 min:2 cur:2
60950 In Range Block::1 bo:187 Volume offset:699 Expected:4 Got:2
60951 Shift 0, v:2 sv:2 min:2 cur:2
60952 In Range Block::1 bo:188 Volume offset:700 Expected:4 Got:2
60953 Shift 0, v:2 sv:2 min:2 cur:2
60954 In Range Block::1 bo:189 Volume offset:701 Expected:4 Got:2
60955 Shift 0, v:2 sv:2 min:2 cur:2
60956 In Range Block::1 bo:190 Volume offset:702 Expected:4 Got:2
60957 Shift 0, v:2 sv:2 min:2 cur:2
60958 In Range Block::1 bo:191 Volume offset:703 Expected:4 Got:2
60959 Shift 0, v:2 sv:2 min:2 cur:2
60960 In Range Block::1 bo:192 Volume offset:704 Expected:4 Got:2
60961 Shift 0, v:2 sv:2 min:2 cur:2
60962 In Range Block::1 bo:193 Volume offset:705 Expected:4 Got:2
60963 Shift 0, v:2 sv:2 min:2 cur:2
60964 In Range Block::1 bo:194 Volume offset:706 Expected:4 Got:2
60965 Shift 0, v:2 sv:2 min:2 cur:2
60966 In Range Block::1 bo:195 Volume offset:707 Expected:4 Got:2
60967 Shift 0, v:2 sv:2 min:2 cur:2
60968 In Range Block::1 bo:196 Volume offset:708 Expected:4 Got:2
60969 Shift 0, v:2 sv:2 min:2 cur:2
60970 In Range Block::1 bo:197 Volume offset:709 Expected:4 Got:2
60971 Shift 0, v:2 sv:2 min:2 cur:2
60972 In Range Block::1 bo:198 Volume offset:710 Expected:4 Got:2
60973 Shift 0, v:2 sv:2 min:2 cur:2
60974 In Range Block::1 bo:199 Volume offset:711 Expected:4 Got:2
60975 Shift 0, v:2 sv:2 min:2 cur:2
60976 In Range Block::1 bo:200 Volume offset:712 Expected:4 Got:2
60977 Shift 0, v:2 sv:2 min:2 cur:2
60978 In Range Block::1 bo:201 Volume offset:713 Expected:4 Got:2
60979 Shift 0, v:2 sv:2 min:2 cur:2
60980 In Range Block::1 bo:202 Volume offset:714 Expected:4 Got:2
60981 Shift 0, v:2 sv:2 min:2 cur:2
60982 In Range Block::1 bo:203 Volume offset:715 Expected:4 Got:2
60983 Shift 0, v:2 sv:2 min:2 cur:2
60984 In Range Block::1 bo:204 Volume offset:716 Expected:4 Got:2
60985 Shift 0, v:2 sv:2 min:2 cur:2
60986 In Range Block::1 bo:205 Volume offset:717 Expected:4 Got:2
60987 Shift 0, v:2 sv:2 min:2 cur:2
60988 In Range Block::1 bo:206 Volume offset:718 Expected:4 Got:2
60989 Shift 0, v:2 sv:2 min:2 cur:2
60990 In Range Block::1 bo:207 Volume offset:719 Expected:4 Got:2
60991 Shift 0, v:2 sv:2 min:2 cur:2
60992 In Range Block::1 bo:208 Volume offset:720 Expected:4 Got:2
60993 Shift 0, v:2 sv:2 min:2 cur:2
60994 In Range Block::1 bo:209 Volume offset:721 Expected:4 Got:2
60995 Shift 0, v:2 sv:2 min:2 cur:2
60996 In Range Block::1 bo:210 Volume offset:722 Expected:4 Got:2
60997 Shift 0, v:2 sv:2 min:2 cur:2
60998 In Range Block::1 bo:211 Volume offset:723 Expected:4 Got:2
60999 Shift 0, v:2 sv:2 min:2 cur:2
61000 In Range Block::1 bo:212 Volume offset:724 Expected:4 Got:2
61001 Shift 0, v:2 sv:2 min:2 cur:2
61002 In Range Block::1 bo:213 Volume offset:725 Expected:4 Got:2
61003 okShift 0, v:2 sv:2 min:2 cur:2
61004 In Range Block::1 bo:214 Volume offset:726 Expected:4 Got:2
61005 Shift 0, v:2 sv:2 min:2 cur:2
61006 In Range Block::1 bo:215 Volume offset:727 Expected:4 Got:2
61007 Shift 0, v:2 sv:2 min:2 cur:2
61008 In Range Block::1 bo:216 Volume offset:728 Expected:4 Got:2
61009 Shift 0, v:2 sv:2 min:2 cur:2
61010 In Range Block::1 bo:217 Volume offset:729 Expected:4 Got:2
61011 Shift 0, v:2 sv:2 min:2 cur:2
61012 In Range Block::1 bo:218 Volume offset:730 Expected:4 Got:2
61013 Shift 0, v:2 sv:2 min:2 cur:2
61014 In Range Block::1 bo:219 Volume offset:731 Expected:4 Got:2
61015 Shift 0, v:2 sv:2 min:2 cur:2
61016 In Range Block::1 bo:220 Volume offset:732 Expected:4 Got:2
61017 Shift 0, v:2 sv:2 min:2 cur:2
61018 In Range Block::1 bo:221 Volume offset:733 Expected:4 Got:2
61019 Shift 0, v:2 sv:2 min:2 cur:2
61020 In Range Block::1 bo:222 Volume offset:734 Expected:4 Got:2
61021 Shift 0, v:2 sv:2 min:2 cur:2
61022 In Range Block::1 bo:223 Volume offset:735 Expected:4 Got:2
61023 Shift 0, v:2 sv:2 min:2 cur:2
61024 In Range Block::1 bo:224 Volume offset:736 Expected:4 Got:2
61025 Shift 0, v:2 sv:2 min:2 cur:2
61026 In Range Block::1 bo:225 Volume offset:737 Expected:4 Got:2
61027 Shift 0, v:2 sv:2 min:2 cur:2
61028 In Range Block::1 bo:226 Volume offset:738 Expected:4 Got:2
61029 Shift 0, v:2 sv:2 min:2 cur:2
61030 In Range Block::1 bo:227 Volume offset:739 Expected:4 Got:2
61031 Shift 0, v:2 sv:2 min:2 cur:2
61032 In Range Block::1 bo:228 Volume offset:740 Expected:4 Got:2
61033 Shift 0, v:2 sv:2 min:2 cur:2
61034 In Range Block::1 bo:229 Volume offset:741 Expected:4 Got:2
61035 Shift 0, v:2 sv:2 min:2 cur:2
61036 In Range Block::1 bo:230 Volume offset:742 Expected:4 Got:2
61037 Shift 0, v:2 sv:2 min:2 cur:2
61038 In Range Block::1 bo:231 Volume offset:743 Expected:4 Got:2
61039 Shift 0, v:2 sv:2 min:2 cur:2
61040 In Range Block::1 bo:232 Volume offset:744 Expected:4 Got:2
61041 Shift 0, v:2 sv:2 min:2 cur:2
61042 In Range Block::1 bo:233 Volume offset:745 Expected:4 Got:2
61043 Shift 0, v:2 sv:2 min:2 cur:2
61044 In Range Block::1 bo:234 Volume offset:746 Expected:4 Got:2
61045 Shift 0, v:2 sv:2 min:2 cur:2
61046 In Range Block::1 bo:235 Volume offset:747 Expected:4 Got:2
61047 Shift 0, v:2 sv:2 min:2 cur:2
61048 In Range Block::1 bo:236 Volume offset:748 Expected:4 Got:2
61049 Shift 0, v:2 sv:2 min:2 cur:2
61050 In Range Block::1 bo:237 Volume offset:749 Expected:4 Got:2
61051 Shift 0, v:2 sv:2 min:2 cur:2
61052 In Range Block::1 bo:238 Volume offset:750 Expected:4 Got:2
61053 Shift 0, v:2 sv:2 min:2 cur:2
61054 In Range Block::1 bo:239 Volume offset:751 Expected:4 Got:2
61055 Shift 0, v:2 sv:2 min:2 cur:2
61056 In Range Block::1 bo:240 Volume offset:752 Expected:4 Got:2
61057 Shift 0, v:2 sv:2 min:2 cur:2
61058 In Range Block::1 bo:241 Volume offset:753 Expected:4 Got:2
61059 Shift 0, v:2 sv:2 min:2 cur:2
61060 In Range Block::1 bo:242 Volume offset:754 Expected:4 Got:2
61061 Shift 0, v:2 sv:2 min:2 cur:2
61062 In Range Block::1 bo:243 Volume offset:755 Expected:4 Got:2
61063 Shift 0, v:2 sv:2 min:2 cur:2
61064 In Range Block::1 bo:244 Volume offset:756 Expected:4 Got:2
61065 Shift 0, v:2 sv:2 min:2 cur:2
61066 In Range Block::1 bo:245 Volume offset:757 Expected:4 Got:2
61067 Shift 0, v:2 sv:2 min:2 cur:2
61068 In Range Block::1 bo:246 Volume offset:758 Expected:4 Got:2
61069 Shift 0, v:2 sv:2 min:2 cur:2
61070 In Range Block::1 bo:247 Volume offset:759 Expected:4 Got:2
61071 Shift 0, v:2 sv:2 min:2 cur:2
61072 In Range Block::1 bo:248 Volume offset:760 Expected:4 Got:2
61073 Shift 0, v:2 sv:2 min:2 cur:2
61074 In Range Block::1 bo:249 Volume offset:761 Expected:4 Got:2
61075 Shift 0, v:2 sv:2 min:2 cur:2
61076 In Range Block::1 bo:250 Volume offset:762 Expected:4 Got:2
61077 Shift 0, v:2 sv:2 min:2 cur:2
61078 In Range Block::1 bo:251 Volume offset:763 Expected:4 Got:2
61079 Shift 0, v:2 sv:2 min:2 cur:2
61080 In Range Block::1 bo:252 Volume offset:764 Expected:4 Got:2
61081 Shift 0, v:2 sv:2 min:2 cur:2
61082 In Range Block::1 bo:253 Volume offset:765 Expected:4 Got:2
61083 Shift 0, v:2 sv:2 min:2 cur:2
61084 In Range Block::1 bo:254 Volume offset:766 Expected:4 Got:2
61085 Shift 0, v:2 sv:2 min:2 cur:2
61086 In Range Block::1 bo:255 Volume offset:767 Expected:4 Got:2
61087 Shift 0, v:2 sv:2 min:2 cur:2
61088 In Range Block::1 bo:256 Volume offset:768 Expected:4 Got:2
61089 Shift 0, v:2 sv:2 min:2 cur:2
61090 In Range Block::1 bo:257 Volume offset:769 Expected:4 Got:2
61091 Shift 0, v:2 sv:2 min:2 cur:2
61092 In Range Block::1 bo:258 Volume offset:770 Expected:4 Got:2
61093 Shift 0, v:2 sv:2 min:2 cur:2
61094 In Range Block::1 bo:259 Volume offset:771 Expected:4 Got:2
61095 Shift 0, v:2 sv:2 min:2 cur:2
61096 In Range Block::1 bo:260 Volume offset:772 Expected:4 Got:2
61097 Shift 0, v:2 sv:2 min:2 cur:2
61098 In Range Block::1 bo:261 Volume offset:773 Expected:4 Got:2
61099 Shift 0, v:2 sv:2 min:2 cur:2
61100 In Range Block::1 bo:262 Volume offset:774 Expected:4 Got:2
61101 Shift 0, v:2 sv:2 min:2 cur:2
61102 In Range Block::1 bo:263 Volume offset:775 Expected:4 Got:2
61103 Shift 0, v:2 sv:2 min:2 cur:2
61104 In Range Block::1 bo:264 Volume offset:776 Expected:4 Got:2
61105 Shift 0, v:2 sv:2 min:2 cur:2
61106 In Range Block::1 bo:265 Volume offset:777 Expected:4 Got:2
61107 Shift 0, v:2 sv:2 min:2 cur:2
61108 In Range Block::1 bo:266 Volume offset:778 Expected:4 Got:2
61109 Shift 0, v:2 sv:2 min:2 cur:2
61110 In Range Block::1 bo:267 Volume offset:779 Expected:4 Got:2
61111 Shift 0, v:2 sv:2 min:2 cur:2
61112 In Range Block::1 bo:268 Volume offset:780 Expected:4 Got:2
61113 Shift 0, v:2 sv:2 min:2 cur:2
61114 
61115 In Range Block::1 bo:269 Volume offset:781 Expected:4 Got:2
61116 Shift 0, v:2 sv:2 min:2 cur:2
61117 In Range Block::1 bo:270 Volume offset:782 Expected:4 Got:2
61118 Shift 0, v:2 sv:2 min:2 cur:2
61119 In Range Block::1 bo:271 Volume offset:783 Expected:4 Got:2
61120 Shift 0, v:2 sv:2 min:2 cur:2
61121 In Range Block::1 bo:272 Volume offset:784 Expected:4 Got:2
61122 Shift 0, v:2 sv:2 min:2 cur:2
61123 In Range Block::1 bo:273 Volume offset:785 Expected:4 Got:2
61124 Shift 0, v:2 sv:2 min:2 cur:2
61125 In Range Block::1 bo:274 Volume offset:786 Expected:4 Got:2
61126 Shift 0, v:2 sv:2 min:2 cur:2
61127 In Range Block::1 bo:275 Volume offset:787 Expected:4 Got:2
61128 Shift 0, v:2 sv:2 min:2 cur:2
61129 In Range Block::1 bo:276 Volume offset:788 Expected:4 Got:2
61130 Shift 0, v:2 sv:2 min:2 cur:2
61131 In Range Block::1 bo:277 Volume offset:789 Expected:4 Got:2
61132 Shift 0, v:2 sv:2 min:2 cur:2
61133 In Range Block::1 bo:278 Volume offset:790 Expected:4 Got:2
61134 Shift 0, v:2 sv:2 min:2 cur:2
61135 In Range Block::1 bo:279 Volume offset:791 Expected:4 Got:2
61136 Shift 0, v:2 sv:2 min:2 cur:2
61137 In Range Block::1 bo:280 Volume offset:792 Expected:4 Got:2
61138 Shift 0, v:2 sv:2 min:2 cur:2
61139 In Range Block::1 bo:281 Volume offset:793 Expected:4 Got:2
61140 Shift 0, v:2 sv:2 min:2 cur:2
61141 In Range Block::1 bo:282 Volume offset:794 Expected:4 Got:2
61142 test test::test_wl_update_commit ... Shift 0, v:2 sv:2 min:2 cur:2
61143 In Range Block::1 bo:283 Volume offset:795 Expected:4 Got:2
61144 Shift 0, v:2 sv:2 min:2 cur:2
61145 In Range Block::1 bo:284 Volume offset:796 Expected:4 Got:2
61146 Shift 0, v:2 sv:2 min:2 cur:2
61147 In Range Block::1 bo:285 Volume offset:797 Expected:4 Got:2
61148 Shift 0, v:2 sv:2 min:2 cur:2
61149 In Range Block::1 bo:286 Volume offset:798 Expected:4 Got:2
61150 Shift 0, v:2 sv:2 min:2 cur:2
61151 In Range Block::1 bo:287 Volume offset:799 Expected:4 Got:2
61152 Shift 0, v:2 sv:2 min:2 cur:2
61153 In Range Block::1 bo:288 Volume offset:800 Expected:4 Got:2
61154 Shift 0, v:2 sv:2 min:2 cur:2
61155 In Range Block::1 bo:289 Volume offset:801 Expected:4 Got:2
61156 Shift 0, v:2 sv:2 min:2 cur:2
61157 In Range Block::1 bo:290 Volume offset:802 Expected:4 Got:2
61158 Shift 0, v:2 sv:2 min:2 cur:2
61159 In Range Block::1 bo:291 Volume offset:803 Expected:4 Got:2
61160 Shift 0, v:2 sv:2 min:2 cur:2
61161 In Range Block::1 bo:292 Volume offset:804 Expected:4 Got:2
61162 Shift 0, v:2 sv:2 min:2 cur:2
61163 In Range Block::1 bo:293 Volume offset:805 Expected:4 Got:2
61164 Shift 0, v:2 sv:2 min:2 cur:2
61165 In Range Block::1 bo:294 Volume offset:806 Expected:4 Got:2
61166 Shift 0, v:2 sv:2 min:2 cur:2
61167 In Range Block::1 bo:295 Volume offset:807 Expected:4 Got:2
61168 Shift 0, v:2 sv:2 min:2 cur:2
61169 In Range Block::1 bo:296 Volume offset:808 Expected:4 Got:2
61170 Shift 0, v:2 sv:2 min:2 cur:2
61171 In Range Block::1 bo:297 Volume offset:809 Expected:4 Got:2
61172 Shift 0, v:2 sv:2 min:2 cur:2
61173 In Range Block::1 bo:298 Volume offset:810 Expected:4 Got:2
61174 Shift 0, v:2 sv:2 min:2 cur:2
61175 In Range Block::1 bo:299 Volume offset:811 Expected:4 Got:2
61176 Shift 0, v:2 sv:2 min:2 cur:2
61177 In Range Block::1 bo:300 Volume offset:812 Expected:4 Got:2
61178 Shift 0, v:2 sv:2 min:2 cur:2
61179 In Range Block::1 bo:301 Volume offset:813 Expected:4 Got:2
61180 Shift 0, v:2 sv:2 min:2 cur:2
61181 In Range Block::1 bo:302 Volume offset:814 Expected:4 Got:2
61182 Shift 0, v:2 sv:2 min:2 cur:2
61183 In Range Block::1 bo:303 Volume offset:815 Expected:4 Got:2
61184 Shift 0, v:2 sv:2 min:2 cur:2
61185 In Range Block::1 bo:304 Volume offset:816 Expected:4 Got:2
61186 Shift 0, v:2 sv:2 min:2 cur:2
61187 In Range Block::1 bo:305 Volume offset:817 Expected:4 Got:2
61188 Shift 0, v:2 sv:2 min:2 cur:2
61189 In Range Block::1 bo:306 Volume offset:818 Expected:4 Got:2
61190 Shift 0, v:2 sv:2 min:2 cur:2
61191 In Range Block::1 bo:307 Volume offset:819 Expected:4 Got:2
61192 Shift 0, v:2 sv:2 min:2 cur:2
61193 In Range Block::1 bo:308 Volume offset:820 Expected:4 Got:2
61194 Shift 0, v:2 sv:2 min:2 cur:2
61195 In Range Block::1 bo:309 Volume offset:821 Expected:4 Got:2
61196 Shift 0, v:2 sv:2 min:2 cur:2
61197 In Range Block::1 bo:310 Volume offset:822 Expected:4 Got:2
61198 Shift 0, v:2 sv:2 min:2 cur:2
61199 In Range Block::1 bo:311 Volume offset:823 Expected:4 Got:2
61200 Shift 0, v:2 sv:2 min:2 cur:2
61201 In Range Block::1 bo:312 Volume offset:824 Expected:4 Got:2
61202 Shift 0, v:2 sv:2 min:2 cur:2
61203 In Range Block::1 bo:313 Volume offset:825 Expected:4 Got:2
61204 Shift 0, v:2 sv:2 min:2 cur:2
61205 In Range Block::1 bo:314 Volume offset:826 Expected:4 Got:2
61206 Shift 0, v:2 sv:2 min:2 cur:2
61207 In Range Block::1 bo:315 Volume offset:827 Expected:4 Got:2
61208 Shift 0, v:2 sv:2 min:2 cur:2
61209 In Range Block::1 bo:316 Volume offset:828 Expected:4 Got:2
61210 Shift 0, v:2 sv:2 min:2 cur:2
61211 In Range Block::1 bo:317 Volume offset:829 Expected:4 Got:2
61212 Shift 0, v:2 sv:2 min:2 cur:2
61213 In Range Block::1 bo:318 Volume offset:830 Expected:4 Got:2
61214 Shift 0, v:2 sv:2 min:2 cur:2
61215 In Range Block::1 bo:319 Volume offset:831 Expected:4 Got:2
61216 Shift 0, v:2 sv:2 min:2 cur:2
61217 In Range Block::1 bo:320 Volume offset:832 Expected:4 Got:2
61218 Shift 0, v:2 sv:2 min:2 cur:2
61219 In Range Block::1 bo:321 Volume offset:833 Expected:4 Got:2
61220 Shift 0, v:2 sv:2 min:2 cur:2
61221 In Range Block::1 bo:322 Volume offset:834 Expected:4 Got:2
61222 Shift 0, v:2 sv:2 min:2 cur:2
61223 In Range Block::1 bo:323 Volume offset:835 Expected:4 Got:2
61224 Shift 0, v:2 sv:2 min:2 cur:2
61225 In Range Block::1 bo:324 Volume offset:836 Expected:4 Got:2
61226 Shift 0, v:2 sv:2 min:2 cur:2
61227 In Range Block::1 bo:325 Volume offset:837 Expected:4 Got:2
61228 Shift 0, v:2 sv:2 min:2 cur:2
61229 In Range Block::1 bo:326 Volume offset:838 Expected:4 Got:2
61230 Shift 0, v:2 sv:2 min:2 cur:2
61231 In Range Block::1 bo:327 Volume offset:839 Expected:4 Got:2
61232 Shift 0, v:2 sv:2 min:2 cur:2
61233 In Range Block::1 bo:328 Volume offset:840 Expected:4 Got:2
61234 Shift 0, v:2 sv:2 min:2 cur:2
61235 In Range Block::1 bo:329 Volume offset:841 Expected:4 Got:2
61236 Shift 0, v:2 sv:2 min:2 cur:2
61237 In Range Block::1 bo:330 Volume offset:842 Expected:4 Got:2
61238 Shift 0, v:2 sv:2 min:2 cur:2
61239 In Range Block::1 bo:331 Volume offset:843 Expected:4 Got:2
61240 Shift 0, v:2 sv:2 min:2 cur:2
61241 In Range Block::1 bo:332 Volume offset:844 Expected:4 Got:2
61242 Shift 0, v:2 sv:2 min:2 cur:2
61243 In Range Block::1 bo:333 Volume offset:845 Expected:4 Got:2
61244 Shift 0, v:2 sv:2 min:2 cur:2
61245 In Range Block::1 bo:334 Volume offset:846 Expected:4 Got:2
61246 Shift 0, v:2 sv:2 min:2 cur:2
61247 In Range Block::1 bo:335 Volume offset:847 Expected:4 Got:2
61248 Shift 0, v:2 sv:2 min:2 cur:2
61249 In Range Block::1 bo:336 Volume offset:848 Expected:4 Got:2
61250 Shift 0, v:2 sv:2 min:2 cur:2
61251 In Range Block::1 bo:337 Volume offset:849 Expected:4 Got:2
61252 Shift 0, v:2 sv:2 min:2 cur:2
61253 In Range Block::1 bo:338 Volume offset:850 Expected:4 Got:2
61254 Shift 0, v:2 sv:2 min:2 cur:2
61255 In Range Block::1 bo:339 Volume offset:851 Expected:4 Got:2
61256 Shift 0, v:2 sv:2 min:2 cur:2
61257 In Range Block::1 bo:340 Volume offset:852 Expected:4 Got:2
61258 Shift 0, v:2 sv:2 min:2 cur:2
61259 In Range Block::1 bo:341 Volume offset:853 Expected:4 Got:2
61260 Shift 0, v:2 sv:2 min:2 cur:2
61261 In Range Block::1 bo:342 Volume offset:854 Expected:4 Got:2
61262 Shift 0, v:2 sv:2 min:2 cur:2
61263 In Range Block::1 bo:343 Volume offset:855 Expected:4 Got:2
61264 Shift 0, v:2 sv:2 min:2 cur:2
61265 In Range Block::1 bo:344 Volume offset:856 Expected:4 Got:2
61266 Shift 0, v:2 sv:2 min:2 cur:2
61267 In Range Block::1 bo:345 Volume offset:857 Expected:4 Got:2
61268 Shift 0, v:2 sv:2 min:2 cur:2
61269 In Range Block::1 bo:346 Volume offset:858 Expected:4 Got:2
61270 Shift 0, v:2 sv:2 min:2 cur:2
61271 In Range Block::1 bo:347 Volume offset:859 Expected:4 Got:2
61272 Shift 0, v:2 sv:2 min:2 cur:2
61273 In Range Block::1 bo:348 Volume offset:860 Expected:4 Got:2
61274 Shift 0, v:2 sv:2 min:2 cur:2
61275 In Range Block::1 bo:349 Volume offset:861 Expected:4 Got:2
61276 Shift 0, v:2 sv:2 min:2 cur:2
61277 In Range Block::1 bo:350 Volume offset:862 Expected:4 Got:2
61278 Shift 0, v:2 sv:2 min:2 cur:2
61279 In Range Block::1 bo:351 Volume offset:863 Expected:4 Got:2
61280 Shift 0, v:2 sv:2 min:2 cur:2
61281 In Range Block::1 bo:352 Volume offset:864 Expected:4 Got:2
61282 Shift 0, v:2 sv:2 min:2 cur:2
61283 In Range Block::1 bo:353 Volume offset:865 Expected:4 Got:2
61284 Shift 0, v:2 sv:2 min:2 cur:2
61285 In Range Block::1 bo:354 Volume offset:866 Expected:4 Got:2
61286 Shift 0, v:2 sv:2 min:2 cur:2
61287 In Range Block::1 bo:355 Volume offset:867 Expected:4 Got:2
61288 Shift 0, v:2 sv:2 min:2 cur:2
61289 In Range Block::1 bo:356 Volume offset:868 Expected:4 Got:2
61290 Shift 0, v:2 sv:2 min:2 cur:2
61291 In Range Block::1 bo:357 Volume offset:869 Expected:4 Got:2
61292 Shift 0, v:2 sv:2 min:2 cur:2
61293 In Range Block::1 bo:358 Volume offset:870 Expected:4 Got:2
61294 Shift 0, v:2 sv:2 min:2 cur:2
61295 In Range Block::1 bo:359 Volume offset:871 Expected:4 Got:2
61296 Shift 0, v:2 sv:2 min:2 cur:2
61297 In Range Block::1 bo:360 Volume offset:872 Expected:4 Got:2
61298 Shift 0, v:2 sv:2 min:2 cur:2
61299 In Range Block::1 bo:361 Volume offset:873 Expected:4 Got:2
61300 Shift 0, v:2 sv:2 min:2 cur:2
61301 In Range Block::1 bo:362 Volume offset:874 Expected:4 Got:2
61302 Shift 0, v:2 sv:2 min:2 cur:2
61303 In Range Block::1 bo:363 Volume offset:875 Expected:4 Got:2
61304 Shift 0, v:2 sv:2 min:2 cur:2
61305 In Range Block::1 bo:364 Volume offset:876 Expected:4 Got:2
61306 Shift 0, v:2 sv:2 min:2 cur:2
61307 In Range Block::1 bo:365 Volume offset:877 Expected:4 Got:2
61308 Shift 0, v:2 sv:2 min:2 cur:2
61309 In Range Block::1 bo:366 Volume offset:878 Expected:4 Got:2
61310 Shift 0, v:2 sv:2 min:2 cur:2
61311 In Range Block::1 bo:367 Volume offset:879 Expected:4 Got:2
61312 Shift 0, v:2 sv:2 min:2 cur:2
61313 In Range Block::1 bo:368 Volume offset:880 Expected:4 Got:2
61314 Shift 0, v:2 sv:2 min:2 cur:2
61315 In Range Block::1 bo:369 Volume offset:881 Expected:4 Got:2
61316 Shift 0, v:2 sv:2 min:2 cur:2
61317 In Range Block::1 bo:370 Volume offset:882 Expected:4 Got:2
61318 Shift 0, v:2 sv:2 min:2 cur:2
61319 In Range Block::1 bo:371 Volume offset:883 Expected:4 Got:2
61320 Shift 0, v:2 sv:2 min:2 cur:2
61321 In Range Block::1 bo:372 Volume offset:884 Expected:4 Got:2
61322 Shift 0, v:2 sv:2 min:2 cur:2
61323 In Range Block::1 bo:373 Volume offset:885 Expected:4 Got:2
61324 Shift 0, v:2 sv:2 min:2 cur:2
61325 okIn Range Block::1 bo:374 Volume offset:886 Expected:4 Got:2
61326 Shift 0, v:2 sv:2 min:2 cur:2
61327 In Range Block::1 bo:375 Volume offset:887 Expected:4 Got:2
61328 Shift 0, v:2 sv:2 min:2 cur:2
61329 In Range Block::1 bo:376 Volume offset:888 Expected:4 Got:2
61330 Shift 0, v:2 sv:2 min:2 cur:2
61331 In Range Block::1 bo:377 Volume offset:889 Expected:4 Got:2
61332 Shift 0, v:2 sv:2 min:2 cur:2
61333 In Range Block::1 bo:378 Volume offset:890 Expected:4 Got:2
61334 Shift 0, v:2 sv:2 min:2 cur:2
61335 In Range Block::1 bo:379 Volume offset:891 Expected:4 Got:2
61336 Shift 0, v:2 sv:2 min:2 cur:2
61337 In Range Block::1 bo:380 Volume offset:892 Expected:4 Got:2
61338 Shift 0, v:2 sv:2 min:2 cur:2
61339 In Range Block::1 bo:381 Volume offset:893 Expected:4 Got:2
61340 Shift 0, v:2 sv:2 min:2 cur:2
61341 In Range Block::1 bo:382 Volume offset:894 Expected:4 Got:2
61342 Shift 0, v:2 sv:2 min:2 cur:2
61343 In Range Block::1 bo:383 Volume offset:895 Expected:4 Got:2
61344 Shift 0, v:2 sv:2 min:2 cur:2
61345 In Range Block::1 bo:384 Volume offset:896 Expected:4 Got:2
61346 Shift 0, v:2 sv:2 min:2 cur:2
61347 In Range Block::1 bo:385 Volume offset:897 Expected:4 Got:2
61348 Shift 0, v:2 sv:2 min:2 cur:2
61349 In Range Block::1 bo:386 Volume offset:898 Expected:4 Got:2
61350 Shift 0, v:2 sv:2 min:2 cur:2
61351 In Range Block::1 bo:387 Volume offset:899 Expected:4 Got:2
61352 Shift 0, v:2 sv:2 min:2 cur:2
61353 In Range Block::1 bo:388 Volume offset:900 Expected:4 Got:2
61354 Shift 0, v:2 sv:2 min:2 cur:2
61355 In Range Block::1 bo:389 Volume offset:901 Expected:4 Got:2
61356 Shift 0, v:2 sv:2 min:2 cur:2
61357 In Range Block::1 bo:390 Volume offset:902 Expected:4 Got:2
61358 Shift 0, v:2 sv:2 min:2 cur:2
61359 In Range Block::1 bo:391 Volume offset:903 Expected:4 Got:2
61360 Shift 0, v:2 sv:2 min:2 cur:2
61361 In Range Block::1 bo:392 Volume offset:904 Expected:4 Got:2
61362 Shift 0, v:2 sv:2 min:2 cur:2
61363 In Range Block::1 bo:393 Volume offset:905 Expected:4 Got:2
61364 Shift 0, v:2 sv:2 min:2 cur:2
61365 In Range Block::1 bo:394 Volume offset:906 Expected:4 Got:2
61366 Shift 0, v:2 sv:2 min:2 cur:2
61367 In Range Block::1 bo:395 Volume offset:907 Expected:4 Got:2
61368 Shift 0, v:2 sv:2 min:2 cur:2
61369 In Range Block::1 bo:396 Volume offset:908 Expected:4 Got:2
61370 Shift 0, v:2 sv:2 min:2 cur:2
61371 In Range Block::1 bo:397 Volume offset:909 Expected:4 Got:2
61372 Shift 0, v:2 sv:2 min:2 cur:2
61373 In Range Block::1 bo:398 Volume offset:910 Expected:4 Got:2
61374 Shift 0, v:2 sv:2 min:2 cur:2
61375 In Range Block::1 bo:399 Volume offset:911 Expected:4 Got:2
61376 Shift 0, v:2 sv:2 min:2 cur:2
61377 In Range Block::1 bo:400 Volume offset:912 Expected:4 Got:2
61378 Shift 0, v:2 sv:2 min:2 cur:2
61379 In Range Block::1 bo:401 Volume offset:913 Expected:4 Got:2
61380 Shift 0, v:2 sv:2 min:2 cur:2
61381 In Range Block::1 bo:402 Volume offset:914 Expected:4 Got:2
61382 Shift 0, v:2 sv:2 min:2 cur:2
61383 In Range Block::1 bo:403 Volume offset:915 Expected:4 Got:2
61384 Shift 0, v:2 sv:2 min:2 cur:2
61385 In Range Block::1 bo:404 Volume offset:916 Expected:4 Got:2
61386 Shift 0, v:2 sv:2 min:2 cur:2
61387 In Range Block::1 bo:405 Volume offset:917 Expected:4 Got:2
61388 Shift 0, v:2 sv:2 min:2 cur:2
61389 In Range Block::1 bo:406 Volume offset:918 Expected:4 Got:2
61390 Shift 0, v:2 sv:2 min:2 cur:2
61391 In Range Block::1 bo:407 Volume offset:919 Expected:4 Got:2
61392 Shift 0, v:2 sv:2 min:2 cur:2
61393 In Range Block::1 bo:408 Volume offset:920 Expected:4 Got:2
61394 Shift 0, v:2 sv:2 min:2 cur:2
61395 In Range Block::1 bo:409 Volume offset:921 Expected:4 Got:2
61396 Shift 0, v:2 sv:2 min:2 cur:2
61397 In Range Block::1 bo:410 Volume offset:922 Expected:4 Got:2
61398 Shift 0, v:2 sv:2 min:2 cur:2
61399 In Range Block::1 bo:411 Volume offset:923 Expected:4 Got:2
61400 Shift 0, v:2 sv:2 min:2 cur:2
61401 In Range Block::1 bo:412 Volume offset:924 Expected:4 Got:2
61402 Shift 0, v:2 sv:2 min:2 cur:2
61403 In Range Block::1 bo:413 Volume offset:925 Expected:4 Got:2
61404 Shift 0, v:2 sv:2 min:2 cur:2
61405 In Range Block::1 bo:414 Volume offset:926 Expected:4 Got:2
61406 Shift 0, v:2 sv:2 min:2 cur:2
61407 In Range Block::1 bo:415 Volume offset:927 Expected:4 Got:2
61408 Shift 0, v:2 sv:2 min:2 cur:2
61409 In Range Block::1 bo:416 Volume offset:928 Expected:4 Got:2
61410 Shift 0, v:2 sv:2 min:2 cur:2
61411 In Range Block::1 bo:417 Volume offset:929 Expected:4 Got:2
61412 Shift 0, v:2 sv:2 min:2 cur:2
61413 In Range Block::1 bo:418 Volume offset:930 Expected:4 Got:2
61414 Shift 0, v:2 sv:2 min:2 cur:2
61415 In Range Block::1 bo:419 Volume offset:931 Expected:4 Got:2
61416 Shift 0, v:2 sv:2 min:2 cur:2
61417 In Range Block::1 bo:420 Volume offset:932 Expected:4 Got:2
61418 Shift 0, v:2 sv:2 min:2 cur:2
61419 In Range Block::1 bo:421 Volume offset:933 Expected:4 Got:2
61420 Shift 0, v:2 sv:2 min:2 cur:2
61421 In Range Block::1 bo:422 Volume offset:934 Expected:4 Got:2
61422 Shift 0, v:2 sv:2 min:2 cur:2
61423 In Range Block::1 bo:423 Volume offset:935 Expected:4 Got:2
61424 Shift 0, v:2 sv:2 min:2 cur:2
61425 In Range Block::1 bo:424 Volume offset:936 Expected:4 Got:2
61426 Shift 0, v:2 sv:2 min:2 cur:2
61427 In Range Block::1 bo:425 Volume offset:937 Expected:4 Got:2
61428 Shift 0, v:2 sv:2 min:2 cur:2
61429 In Range Block::1 bo:426 Volume offset:938 Expected:4 Got:2
61430 Shift 0, v:2 sv:2 min:2 cur:2
61431 In Range Block::1 bo:427 Volume offset:939 Expected:4 Got:2
61432 Shift 0, v:2 sv:2 min:2 cur:2
61433 In Range Block::1 bo:428 Volume offset:940 Expected:4 Got:2
61434 Shift 0, v:2 sv:2 min:2 cur:2
61435 In Range Block::1 bo:429 Volume offset:941 Expected:4 Got:2
61436 Shift 0, v:2 sv:2 min:2 cur:2
61437 In Range Block::1 bo:430 Volume offset:942 Expected:4 Got:2
61438 Shift 0, v:2 sv:2 min:2 cur:2
61439 In Range Block::1 bo:431 Volume offset:943 Expected:4 Got:2
61440 Shift 0, v:2 sv:2 min:2 cur:2
61441 In Range Block::1 bo:432 Volume offset:944 Expected:4 Got:2
61442 Shift 0, v:2 sv:2 min:2 cur:2
61443 In Range Block::1 bo:433 Volume offset:945 Expected:4 Got:2
61444 Shift 0, v:2 sv:2 min:2 cur:2
61445 In Range Block::1 bo:434 Volume offset:946 Expected:4 Got:2
61446 Shift 0, v:2 sv:2 min:2 cur:2
61447 In Range Block::1 bo:435 Volume offset:947 Expected:4 Got:2
61448 Shift 0, v:2 sv:2 min:2 cur:2
61449 In Range Block::1 bo:436 Volume offset:948 Expected:4 Got:2
61450 Shift 0, v:2 sv:2 min:2 cur:2
61451 In Range Block::1 bo:437 Volume offset:949 Expected:4 Got:2
61452 Shift 0, v:2 sv:2 min:2 cur:2
61453 In Range Block::1 bo:438 Volume offset:950 Expected:4 Got:2
61454 Shift 0, v:2 sv:2 min:2 cur:2
61455 In Range Block::1 bo:439 Volume offset:951 Expected:4 Got:2
61456 Shift 0, v:2 sv:2 min:2 cur:2
61457 In Range Block::1 bo:440 Volume offset:952 Expected:4 Got:2
61458 Shift 0, v:2 sv:2 min:2 cur:2
61459 In Range Block::1 bo:441 Volume offset:953 Expected:4 Got:2
61460 Shift 0, v:2 sv:2 min:2 cur:2
61461 In Range Block::1 bo:442 Volume offset:954 Expected:4 Got:2
61462 Shift 0, v:2 sv:2 min:2 cur:2
61463 In Range Block::1 bo:443 Volume offset:955 Expected:4 Got:2
61464 Shift 0, v:2 sv:2 min:2 cur:2
61465 In Range Block::1 bo:444 Volume offset:956 Expected:4 Got:2
61466 Shift 0, v:2 sv:2 min:2 cur:2
61467 In Range Block::1 bo:445 Volume offset:957 Expected:4 Got:2
61468 Shift 0, v:2 sv:2 min:2 cur:2
61469 In Range Block::1 bo:446 Volume offset:958 Expected:4 Got:2
61470 Shift 0, v:2 sv:2 min:2 cur:2
61471 In Range Block::1 bo:447 Volume offset:959 Expected:4 Got:2
61472 Shift 0, v:2 sv:2 min:2 cur:2
61473 In Range Block::1 bo:448 Volume offset:960 Expected:4 Got:2
61474 Shift 0, v:2 sv:2 min:2 cur:2
61475 In Range Block::1 bo:449 Volume offset:961 Expected:4 Got:2
61476 Shift 0, v:2 sv:2 min:2 cur:2
61477 In Range Block::1 bo:450 Volume offset:962 Expected:4 Got:2
61478 Shift 0, v:2 sv:2 min:2 cur:2
61479 In Range Block::1 bo:451 Volume offset:963 Expected:4 Got:2
61480 Shift 0, v:2 sv:2 min:2 cur:2
61481 In Range Block::1 bo:452 Volume offset:964 Expected:4 Got:2
61482 Shift 0, v:2 sv:2 min:2 cur:2
61483 In Range Block::1 bo:453 Volume offset:965 Expected:4 Got:2
61484 Shift 0, v:2 sv:2 min:2 cur:2
61485 In Range Block::1 bo:454 Volume offset:966 Expected:4 Got:2
61486 Shift 0, v:2 sv:2 min:2 cur:2
61487 In Range Block::1 bo:455 Volume offset:967 Expected:4 Got:2
61488 Shift 0, v:2 sv:2 min:2 cur:2
61489 In Range Block::1 bo:456 Volume offset:968 Expected:4 Got:2
61490 
61491 Shift 0, v:2 sv:2 min:2 cur:2
61492 In Range Block::1 bo:457 Volume offset:969 Expected:4 Got:2
61493 Shift 0, v:2 sv:2 min:2 cur:2
61494 In Range Block::1 bo:458 Volume offset:970 Expected:4 Got:2
61495 Shift 0, v:2 sv:2 min:2 cur:2
61496 In Range Block::1 bo:459 Volume offset:971 Expected:4 Got:2
61497 Shift 0, v:2 sv:2 min:2 cur:2
61498 In Range Block::1 bo:460 Volume offset:972 Expected:4 Got:2
61499 Shift 0, v:2 sv:2 min:2 cur:2
61500 In Range Block::1 bo:461 Volume offset:973 Expected:4 Got:2
61501 Shift 0, v:2 sv:2 min:2 cur:2
61502 In Range Block::1 bo:462 Volume offset:974 Expected:4 Got:2
61503 Shift 0, v:2 sv:2 min:2 cur:2
61504 In Range Block::1 bo:463 Volume offset:975 Expected:4 Got:2
61505 Shift 0, v:2 sv:2 min:2 cur:2
61506 In Range Block::1 bo:464 Volume offset:976 Expected:4 Got:2
61507 Shift 0, v:2 sv:2 min:2 cur:2
61508 In Range Block::1 bo:465 Volume offset:977 Expected:4 Got:2
61509 Shift 0, v:2 sv:2 min:2 cur:2
61510 In Range Block::1 bo:466 Volume offset:978 Expected:4 Got:2
61511 Shift 0, v:2 sv:2 min:2 cur:2
61512 In Range Block::1 bo:467 Volume offset:979 Expected:4 Got:2
61513 Shift 0, v:2 sv:2 min:2 cur:2
61514 In Range Block::1 bo:468 Volume offset:980 Expected:4 Got:2
61515 Shift 0, v:2 sv:2 min:2 cur:2
61516 In Range Block::1 bo:469 Volume offset:981 Expected:4 Got:2
61517 Shift 0, v:2 sv:2 min:2 cur:2
61518 In Range Block::1 bo:470 Volume offset:982 Expected:4 Got:2
61519 Shift 0, v:2 sv:2 min:2 cur:2
61520 In Range Block::1 bo:471 Volume offset:983 Expected:4 Got:2
61521 Shift 0, v:2 sv:2 min:2 cur:2
61522 In Range Block::1 bo:472 Volume offset:984 Expected:4 Got:2
61523 Shift 0, v:2 sv:2 min:2 cur:2
61524 In Range Block::1 bo:473 Volume offset:985 Expected:4 Got:2
61525 Shift 0, v:2 sv:2 min:2 cur:2
61526 In Range Block::1 bo:474 Volume offset:986 Expected:4 Got:2
61527 Shift 0, v:2 sv:2 min:2 cur:2
61528 In Range Block::1 bo:475 Volume offset:987 Expected:4 Got:2
61529 Shift 0, v:2 sv:2 min:2 cur:2
61530 In Range Block::1 bo:476 Volume offset:988 Expected:4 Got:2
61531 Shift 0, v:2 sv:2 min:2 cur:2
61532 In Range Block::1 bo:477 Volume offset:989 Expected:4 Got:2
61533 Shift 0, v:2 sv:2 min:2 cur:2
61534 In Range Block::1 bo:478 Volume offset:990 Expected:4 Got:2
61535 Shift 0, v:2 sv:2 min:2 cur:2
61536 In Range Block::1 bo:479 Volume offset:991 Expected:4 Got:2
61537 Shift 0, v:2 sv:2 min:2 cur:2
61538 In Range Block::1 bo:480 Volume offset:992 Expected:4 Got:2
61539 Shift 0, v:2 sv:2 min:2 cur:2
61540 In Range Block::1 bo:481 Volume offset:993 Expected:4 Got:2
61541 Shift 0, v:2 sv:2 min:2 cur:2
61542 In Range Block::1 bo:482 Volume offset:994 Expected:4 Got:2
61543 Shift 0, v:2 sv:2 min:2 cur:2
61544 In Range Block::1 bo:483 Volume offset:995 Expected:4 Got:2
61545 Shift 0, v:2 sv:2 min:2 cur:2
61546 In Range Block::1 bo:484 Volume offset:996 Expected:4 Got:2
61547 Shift 0, v:2 sv:2 min:2 cur:2
61548 In Range Block::1 bo:485 Volume offset:997 Expected:4 Got:2
61549 Shift 0, v:2 sv:2 min:2 cur:2
61550 In Range Block::1 bo:486 Volume offset:998 Expected:4 Got:2
61551 Shift 0, v:2 sv:2 min:2 cur:2
61552 In Range Block::1 bo:487 Volume offset:999 Expected:4 Got:2
61553 Shift 0, v:2 sv:2 min:2 cur:2
61554 In Range Block::1 bo:488 Volume offset:1000 Expected:4 Got:2
61555 Shift 0, v:2 sv:2 min:2 cur:2
61556 In Range Block::1 bo:489 Volume offset:1001 Expected:4 Got:2
61557 Shift 0, v:2 sv:2 min:2 cur:2
61558 In Range Block::1 bo:490 Volume offset:1002 Expected:4 Got:2
61559 Shift 0, v:2 sv:2 min:2 cur:2
61560 In Range Block::1 bo:491 Volume offset:1003 Expected:4 Got:2
61561 Shift 0, v:2 sv:2 min:2 cur:2
61562 In Range Block::1 bo:492 Volume offset:1004 Expected:4 Got:2
61563 Shift 0, v:2 sv:2 min:2 cur:2
61564 In Range Block::1 bo:493 Volume offset:1005 Expected:4 Got:2
61565 Shift 0, v:2 sv:2 min:2 cur:2
61566 In Range Block::1 bo:494 Volume offset:1006 Expected:4 Got:2
61567 Shift 0, v:2 sv:2 min:2 cur:2
61568 In Range Block::1 bo:495 Volume offset:1007 Expected:4 Got:2
61569 Shift 0, v:2 sv:2 min:2 cur:2
61570 In Range Block::1 bo:496 Volume offset:1008 Expected:4 Got:2
61571 Shift 0, v:2 sv:2 min:2 cur:2
61572 In Range Block::1 bo:497 Volume offset:1009 Expected:4 Got:2
61573 test test::test_wl_update_commit_2 ... Shift 0, v:2 sv:2 min:2 cur:2
61574 In Range Block::1 bo:498 Volume offset:1010 Expected:4 Got:2
61575 Shift 0, v:2 sv:2 min:2 cur:2
61576 In Range Block::1 bo:499 Volume offset:1011 Expected:4 Got:2
61577 Shift 0, v:2 sv:2 min:2 cur:2
61578 In Range Block::1 bo:500 Volume offset:1012 Expected:4 Got:2
61579 Shift 0, v:2 sv:2 min:2 cur:2
61580 In Range Block::1 bo:501 Volume offset:1013 Expected:4 Got:2
61581 Shift 0, v:2 sv:2 min:2 cur:2
61582 In Range Block::1 bo:502 Volume offset:1014 Expected:4 Got:2
61583 Shift 0, v:2 sv:2 min:2 cur:2
61584 In Range Block::1 bo:503 Volume offset:1015 Expected:4 Got:2
61585 Shift 0, v:2 sv:2 min:2 cur:2
61586 In Range Block::1 bo:504 Volume offset:1016 Expected:4 Got:2
61587 Shift 0, v:2 sv:2 min:2 cur:2
61588 In Range Block::1 bo:505 Volume offset:1017 Expected:4 Got:2
61589 Shift 0, v:2 sv:2 min:2 cur:2
61590 In Range Block::1 bo:506 Volume offset:1018 Expected:4 Got:2
61591 Shift 0, v:2 sv:2 min:2 cur:2
61592 In Range Block::1 bo:507 Volume offset:1019 Expected:4 Got:2
61593 Shift 0, v:2 sv:2 min:2 cur:2
61594 In Range Block::1 bo:508 Volume offset:1020 Expected:4 Got:2
61595 Shift 0, v:2 sv:2 min:2 cur:2
61596 In Range Block::1 bo:509 Volume offset:1021 Expected:4 Got:2
61597 Shift 0, v:2 sv:2 min:2 cur:2
61598 In Range Block::1 bo:510 Volume offset:1022 Expected:4 Got:2
61599 Shift 0, v:2 sv:2 min:2 cur:2
61600 In Range Block::1 bo:511 Volume offset:1023 Expected:4 Got:2
61601 Shift 0, v:4 sv:4 min:2 cur:2
61602 Out of Range Block::1 bo:1 Volume offset:513 Expected:2 Got:4
61603 Shift 0, v:4 sv:4 min:2 cur:2
61604 Out of Range Block::1 bo:2 Volume offset:514 Expected:2 Got:4
61605 Shift 0, v:4 sv:4 min:2 cur:2
61606 Out of Range Block::1 bo:3 Volume offset:515 Expected:2 Got:4
61607 Shift 0, v:4 sv:4 min:2 cur:2
61608 Out of Range Block::1 bo:4 Volume offset:516 Expected:2 Got:4
61609 Shift 0, v:4 sv:4 min:2 cur:2
61610 Out of Range Block::1 bo:5 Volume offset:517 Expected:2 Got:4
61611 Shift 0, v:4 sv:4 min:2 cur:2
61612 Out of Range Block::1 bo:6 Volume offset:518 Expected:2 Got:4
61613 Shift 0, v:4 sv:4 min:2 cur:2
61614 Out of Range Block::1 bo:7 Volume offset:519 Expected:2 Got:4
61615 Shift 0, v:4 sv:4 min:2 cur:2
61616 Out of Range Block::1 bo:8 Volume offset:520 Expected:2 Got:4
61617 Shift 0, v:4 sv:4 min:2 cur:2
61618 Out of Range Block::1 bo:9 Volume offset:521 Expected:2 Got:4
61619 Shift 0, v:4 sv:4 min:2 cur:2
61620 Out of Range Block::1 bo:10 Volume offset:522 Expected:2 Got:4
61621 Shift 0, v:4 sv:4 min:2 cur:2
61622 Out of Range Block::1 bo:11 Volume offset:523 Expected:2 Got:4
61623 Shift 0, v:4 sv:4 min:2 cur:2
61624 Out of Range Block::1 bo:12 Volume offset:524 Expected:2 Got:4
61625 Shift 0, v:4 sv:4 min:2 cur:2
61626 Out of Range Block::1 bo:13 Volume offset:525 Expected:2 Got:4
61627 Shift 0, v:4 sv:4 min:2 cur:2
61628 Out of Range Block::1 bo:14 Volume offset:526 Expected:2 Got:4
61629 Shift 0, v:4 sv:4 min:2 cur:2
61630 Out of Range Block::1 bo:15 Volume offset:527 Expected:2 Got:4
61631 Shift 0, v:4 sv:4 min:2 cur:2
61632 Out of Range Block::1 bo:16 Volume offset:528 Expected:2 Got:4
61633 Shift 0, v:4 sv:4 min:2 cur:2
61634 Out of Range Block::1 bo:17 Volume offset:529 Expected:2 Got:4
61635 Shift 0, v:4 sv:4 min:2 cur:2
61636 Out of Range Block::1 bo:18 Volume offset:530 Expected:2 Got:4
61637 Shift 0, v:4 sv:4 min:2 cur:2
61638 Out of Range Block::1 bo:19 Volume offset:531 Expected:2 Got:4
61639 Shift 0, v:4 sv:4 min:2 cur:2
61640 Out of Range Block::1 bo:20 Volume offset:532 Expected:2 Got:4
61641 Shift 0, v:4 sv:4 min:2 cur:2
61642 Out of Range Block::1 bo:21 Volume offset:533 Expected:2 Got:4
61643 Shift 0, v:4 sv:4 min:2 cur:2
61644 Out of Range Block::1 bo:22 Volume offset:534 Expected:2 Got:4
61645 Shift 0, v:4 sv:4 min:2 cur:2
61646 Out of Range Block::1 bo:23 Volume offset:535 Expected:2 Got:4
61647 Shift 0, v:4 sv:4 min:2 cur:2
61648 Out of Range Block::1 bo:24 Volume offset:536 Expected:2 Got:4
61649 Shift 0, v:4 sv:4 min:2 cur:2
61650 Out of Range Block::1 bo:25 Volume offset:537 Expected:2 Got:4
61651 Shift 0, v:4 sv:4 min:2 cur:2
61652 Out of Range Block::1 bo:26 Volume offset:538 Expected:2 Got:4
61653 Shift 0, v:4 sv:4 min:2 cur:2
61654 Out of Range Block::1 bo:27 Volume offset:539 Expected:2 Got:4
61655 Shift 0, v:4 sv:4 min:2 cur:2
61656 Out of Range Block::1 bo:28 Volume offset:540 Expected:2 Got:4
61657 Shift 0, v:4 sv:4 min:2 cur:2
61658 Out of Range Block::1 bo:29 Volume offset:541 Expected:2 Got:4
61659 Shift 0, v:4 sv:4 min:2 cur:2
61660 Out of Range Block::1 bo:30 Volume offset:542 Expected:2 Got:4
61661 Shift 0, v:4 sv:4 min:2 cur:2
61662 Out of Range Block::1 bo:31 Volume offset:543 Expected:2 Got:4
61663 Shift 0, v:4 sv:4 min:2 cur:2
61664 Out of Range Block::1 bo:32 Volume offset:544 Expected:2 Got:4
61665 Shift 0, v:4 sv:4 min:2 cur:2
61666 Out of Range Block::1 bo:33 Volume offset:545 Expected:2 Got:4
61667 Shift 0, v:4 sv:4 min:2 cur:2
61668 Out of Range Block::1 bo:34 Volume offset:546 Expected:2 Got:4
61669 Shift 0, v:4 sv:4 min:2 cur:2
61670 Out of Range Block::1 bo:35 Volume offset:547 Expected:2 Got:4
61671 Shift 0, v:4 sv:4 min:2 cur:2
61672 Out of Range Block::1 bo:36 Volume offset:548 Expected:2 Got:4
61673 Shift 0, v:4 sv:4 min:2 cur:2
61674 Out of Range Block::1 bo:37 Volume offset:549 Expected:2 Got:4
61675 Shift 0, v:4 sv:4 min:2 cur:2
61676 Out of Range Block::1 bo:38 Volume offset:550 Expected:2 Got:4
61677 Shift 0, v:4 sv:4 min:2 cur:2
61678 Out of Range Block::1 bo:39 Volume offset:551 Expected:2 Got:4
61679 Shift 0, v:4 sv:4 min:2 cur:2
61680 Out of Range Block::1 bo:40 Volume offset:552 Expected:2 Got:4
61681 Shift 0, v:4 sv:4 min:2 cur:2
61682 Out of Range Block::1 bo:41 Volume offset:553 Expected:2 Got:4
61683 Shift 0, v:4 sv:4 min:2 cur:2
61684 Out of Range Block::1 bo:42 Volume offset:554 Expected:2 Got:4
61685 Shift 0, v:4 sv:4 min:2 cur:2
61686 Out of Range Block::1 bo:43 Volume offset:555 Expected:2 Got:4
61687 Shift 0, v:4 sv:4 min:2 cur:2
61688 Out of Range Block::1 bo:44 Volume offset:556 Expected:2 Got:4
61689 Shift 0, v:4 sv:4 min:2 cur:2
61690 Out of Range Block::1 bo:45 Volume offset:557 Expected:2 Got:4
61691 okShift 0, v:4 sv:4 min:2 cur:2
61692 Out of Range Block::1 bo:46 Volume offset:558 Expected:2 Got:4
61693 Shift 0, v:4 sv:4 min:2 cur:2
61694 Out of Range Block::1 bo:47 Volume offset:559 Expected:2 Got:4
61695 Shift 0, v:4 sv:4 min:2 cur:2
61696 Out of Range Block::1 bo:48 Volume offset:560 Expected:2 Got:4
61697 Shift 0, v:4 sv:4 min:2 cur:2
61698 Out of Range Block::1 bo:49 Volume offset:561 Expected:2 Got:4
61699 Shift 0, v:4 sv:4 min:2 cur:2
61700 Out of Range Block::1 bo:50 Volume offset:562 Expected:2 Got:4
61701 Shift 0, v:4 sv:4 min:2 cur:2
61702 Out of Range Block::1 bo:51 Volume offset:563 Expected:2 Got:4
61703 Shift 0, v:4 sv:4 min:2 cur:2
61704 Out of Range Block::1 bo:52 Volume offset:564 Expected:2 Got:4
61705 Shift 0, v:4 sv:4 min:2 cur:2
61706 
61707 Out of Range Block::1 bo:53 Volume offset:565 Expected:2 Got:4
61708 Shift 0, v:4 sv:4 min:2 cur:2
61709 Out of Range Block::1 bo:54 Volume offset:566 Expected:2 Got:4
61710 Shift 0, v:4 sv:4 min:2 cur:2
61711 Out of Range Block::1 bo:55 Volume offset:567 Expected:2 Got:4
61712 Shift 0, v:4 sv:4 min:2 cur:2
61713 Out of Range Block::1 bo:56 Volume offset:568 Expected:2 Got:4
61714 Shift 0, v:4 sv:4 min:2 cur:2
61715 Out of Range Block::1 bo:57 Volume offset:569 Expected:2 Got:4
61716 Shift 0, v:4 sv:4 min:2 cur:2
61717 Out of Range Block::1 bo:58 Volume offset:570 Expected:2 Got:4
61718 Shift 0, v:4 sv:4 min:2 cur:2
61719 Out of Range Block::1 bo:59 Volume offset:571 Expected:2 Got:4
61720 Shift 0, v:4 sv:4 min:2 cur:2
61721 Out of Range Block::1 bo:60 Volume offset:572 Expected:2 Got:4
61722 Shift 0, v:4 sv:4 min:2 cur:2
61723 Out of Range Block::1 bo:61 Volume offset:573 Expected:2 Got:4
61724 Shift 0, v:4 sv:4 min:2 cur:2
61725 Out of Range Block::1 bo:62 Volume offset:574 Expected:2 Got:4
61726 Shift 0, v:4 sv:4 min:2 cur:2
61727 Out of Range Block::1 bo:63 Volume offset:575 Expected:2 Got:4
61728 Shift 0, v:4 sv:4 min:2 cur:2
61729 Out of Range Block::1 bo:64 Volume offset:576 Expected:2 Got:4
61730 Shift 0, v:4 sv:4 min:2 cur:2
61731 Out of Range Block::1 bo:65 Volume offset:577 Expected:2 Got:4
61732 Shift 0, v:4 sv:4 min:2 cur:2
61733 Out of Range Block::1 bo:66 Volume offset:578 Expected:2 Got:4
61734 Shift 0, v:4 sv:4 min:2 cur:2
61735 Out of Range Block::1 bo:67 Volume offset:579 Expected:2 Got:4
61736 Shift 0, v:4 sv:4 min:2 cur:2
61737 Out of Range Block::1 bo:68 Volume offset:580 Expected:2 Got:4
61738 Shift 0, v:4 sv:4 min:2 cur:2
61739 Out of Range Block::1 bo:69 Volume offset:581 Expected:2 Got:4
61740 Shift 0, v:4 sv:4 min:2 cur:2
61741 Out of Range Block::1 bo:70 Volume offset:582 Expected:2 Got:4
61742 Shift 0, v:4 sv:4 min:2 cur:2
61743 Out of Range Block::1 bo:71 Volume offset:583 Expected:2 Got:4
61744 Shift 0, v:4 sv:4 min:2 cur:2
61745 Out of Range Block::1 bo:72 Volume offset:584 Expected:2 Got:4
61746 Shift 0, v:4 sv:4 min:2 cur:2
61747 Out of Range Block::1 bo:73 Volume offset:585 Expected:2 Got:4
61748 Shift 0, v:4 sv:4 min:2 cur:2
61749 Out of Range Block::1 bo:74 Volume offset:586 Expected:2 Got:4
61750 Shift 0, v:4 sv:4 min:2 cur:2
61751 Out of Range Block::1 bo:75 Volume offset:587 Expected:2 Got:4
61752 Shift 0, v:4 sv:4 min:2 cur:2
61753 Out of Range Block::1 bo:76 Volume offset:588 Expected:2 Got:4
61754 Shift 0, v:4 sv:4 min:2 cur:2
61755 Out of Range Block::1 bo:77 Volume offset:589 Expected:2 Got:4
61756 Shift 0, v:4 sv:4 min:2 cur:2
61757 Out of Range Block::1 bo:78 Volume offset:590 Expected:2 Got:4
61758 Shift 0, v:4 sv:4 min:2 cur:2
61759 Out of Range Block::1 bo:79 Volume offset:591 Expected:2 Got:4
61760 Shift 0, v:4 sv:4 min:2 cur:2
61761 Out of Range Block::1 bo:80 Volume offset:592 Expected:2 Got:4
61762 Shift 0, v:4 sv:4 min:2 cur:2
61763 Out of Range Block::1 bo:81 Volume offset:593 Expected:2 Got:4
61764 Shift 0, v:4 sv:4 min:2 cur:2
61765 Out of Range Block::1 bo:82 Volume offset:594 Expected:2 Got:4
61766 Shift 0, v:4 sv:4 min:2 cur:2
61767 Out of Range Block::1 bo:83 Volume offset:595 Expected:2 Got:4
61768 Shift 0, v:4 sv:4 min:2 cur:2
61769 Out of Range Block::1 bo:84 Volume offset:596 Expected:2 Got:4
61770 Shift 0, v:4 sv:4 min:2 cur:2
61771 Out of Range Block::1 bo:85 Volume offset:597 Expected:2 Got:4
61772 Shift 0, v:4 sv:4 min:2 cur:2
61773 Out of Range Block::1 bo:86 Volume offset:598 Expected:2 Got:4
61774 Shift 0, v:4 sv:4 min:2 cur:2
61775 Out of Range Block::1 bo:87 Volume offset:599 Expected:2 Got:4
61776 Shift 0, v:4 sv:4 min:2 cur:2
61777 Out of Range Block::1 bo:88 Volume offset:600 Expected:2 Got:4
61778 Shift 0, v:4 sv:4 min:2 cur:2
61779 Out of Range Block::1 bo:89 Volume offset:601 Expected:2 Got:4
61780 Shift 0, v:4 sv:4 min:2 cur:2
61781 Out of Range Block::1 bo:90 Volume offset:602 Expected:2 Got:4
61782 Shift 0, v:4 sv:4 min:2 cur:2
61783 Out of Range Block::1 bo:91 Volume offset:603 Expected:2 Got:4
61784 Shift 0, v:4 sv:4 min:2 cur:2
61785 Out of Range Block::1 bo:92 Volume offset:604 Expected:2 Got:4
61786 Shift 0, v:4 sv:4 min:2 cur:2
61787 Out of Range Block::1 bo:93 Volume offset:605 Expected:2 Got:4
61788 Shift 0, v:4 sv:4 min:2 cur:2
61789 Out of Range Block::1 bo:94 Volume offset:606 Expected:2 Got:4
61790 Shift 0, v:4 sv:4 min:2 cur:2
61791 Out of Range Block::1 bo:95 Volume offset:607 Expected:2 Got:4
61792 Shift 0, v:4 sv:4 min:2 cur:2
61793 Out of Range Block::1 bo:96 Volume offset:608 Expected:2 Got:4
61794 Shift 0, v:4 sv:4 min:2 cur:2
61795 Out of Range Block::1 bo:97 Volume offset:609 Expected:2 Got:4
61796 Shift 0, v:4 sv:4 min:2 cur:2
61797 Out of Range Block::1 bo:98 Volume offset:610 Expected:2 Got:4
61798 Shift 0, v:4 sv:4 min:2 cur:2
61799 Out of Range Block::1 bo:99 Volume offset:611 Expected:2 Got:4
61800 Shift 0, v:4 sv:4 min:2 cur:2
61801 Out of Range Block::1 bo:100 Volume offset:612 Expected:2 Got:4
61802 Shift 0, v:4 sv:4 min:2 cur:2
61803 Out of Range Block::1 bo:101 Volume offset:613 Expected:2 Got:4
61804 Shift 0, v:4 sv:4 min:2 cur:2
61805 Out of Range Block::1 bo:102 Volume offset:614 Expected:2 Got:4
61806 Shift 0, v:4 sv:4 min:2 cur:2
61807 Out of Range Block::1 bo:103 Volume offset:615 Expected:2 Got:4
61808 Shift 0, v:4 sv:4 min:2 cur:2
61809 Out of Range Block::1 bo:104 Volume offset:616 Expected:2 Got:4
61810 Shift 0, v:4 sv:4 min:2 cur:2
61811 Out of Range Block::1 bo:105 Volume offset:617 Expected:2 Got:4
61812 Shift 0, v:4 sv:4 min:2 cur:2
61813 Out of Range Block::1 bo:106 Volume offset:618 Expected:2 Got:4
61814 Shift 0, v:4 sv:4 min:2 cur:2
61815 Out of Range Block::1 bo:107 Volume offset:619 Expected:2 Got:4
61816 Shift 0, v:4 sv:4 min:2 cur:2
61817 Out of Range Block::1 bo:108 Volume offset:620 Expected:2 Got:4
61818 Shift 0, v:4 sv:4 min:2 cur:2
61819 Out of Range Block::1 bo:109 Volume offset:621 Expected:2 Got:4
61820 Shift 0, v:4 sv:4 min:2 cur:2
61821 Out of Range Block::1 bo:110 Volume offset:622 Expected:2 Got:4
61822 Shift 0, v:4 sv:4 min:2 cur:2
61823 Out of Range Block::1 bo:111 Volume offset:623 Expected:2 Got:4
61824 Shift 0, v:4 sv:4 min:2 cur:2
61825 Out of Range Block::1 bo:112 Volume offset:624 Expected:2 Got:4
61826 Shift 0, v:4 sv:4 min:2 cur:2
61827 Out of Range Block::1 bo:113 Volume offset:625 Expected:2 Got:4
61828 Shift 0, v:4 sv:4 min:2 cur:2
61829 Out of Range Block::1 bo:114 Volume offset:626 Expected:2 Got:4
61830 Shift 0, v:4 sv:4 min:2 cur:2
61831 Out of Range Block::1 bo:115 Volume offset:627 Expected:2 Got:4
61832 Shift 0, v:4 sv:4 min:2 cur:2
61833 Out of Range Block::1 bo:116 Volume offset:628 Expected:2 Got:4
61834 Shift 0, v:4 sv:4 min:2 cur:2
61835 Out of Range Block::1 bo:117 Volume offset:629 Expected:2 Got:4
61836 Shift 0, v:4 sv:4 min:2 cur:2
61837 Out of Range Block::1 bo:118 Volume offset:630 Expected:2 Got:4
61838 Shift 0, v:4 sv:4 min:2 cur:2
61839 Out of Range Block::1 bo:119 Volume offset:631 Expected:2 Got:4
61840 Shift 0, v:4 sv:4 min:2 cur:2
61841 Out of Range Block::1 bo:120 Volume offset:632 Expected:2 Got:4
61842 Shift 0, v:4 sv:4 min:2 cur:2
61843 Out of Range Block::1 bo:121 Volume offset:633 Expected:2 Got:4
61844 Shift 0, v:4 sv:4 min:2 cur:2
61845 Out of Range Block::1 bo:122 Volume offset:634 Expected:2 Got:4
61846 Shift 0, v:4 sv:4 min:2 cur:2
61847 Out of Range Block::1 bo:123 Volume offset:635 Expected:2 Got:4
61848 Shift 0, v:4 sv:4 min:2 cur:2
61849 Out of Range Block::1 bo:124 Volume offset:636 Expected:2 Got:4
61850 Shift 0, v:4 sv:4 min:2 cur:2
61851 Out of Range Block::1 bo:125 Volume offset:637 Expected:2 Got:4
61852 Shift 0, v:4 sv:4 min:2 cur:2
61853 Out of Range Block::1 bo:126 Volume offset:638 Expected:2 Got:4
61854 Shift 0, v:4 sv:4 min:2 cur:2
61855 Out of Range Block::1 bo:127 Volume offset:639 Expected:2 Got:4
61856 Shift 0, v:4 sv:4 min:2 cur:2
61857 Out of Range Block::1 bo:128 Volume offset:640 Expected:2 Got:4
61858 Shift 0, v:4 sv:4 min:2 cur:2
61859 Out of Range Block::1 bo:129 Volume offset:641 Expected:2 Got:4
61860 Shift 0, v:4 sv:4 min:2 cur:2
61861 Out of Range Block::1 bo:130 Volume offset:642 Expected:2 Got:4
61862 Shift 0, v:4 sv:4 min:2 cur:2
61863 Out of Range Block::1 bo:131 Volume offset:643 Expected:2 Got:4
61864 Shift 0, v:4 sv:4 min:2 cur:2
61865 Out of Range Block::1 bo:132 Volume offset:644 Expected:2 Got:4
61866 Shift 0, v:4 sv:4 min:2 cur:2
61867 Out of Range Block::1 bo:133 Volume offset:645 Expected:2 Got:4
61868 Shift 0, v:4 sv:4 min:2 cur:2
61869 Out of Range Block::1 bo:134 Volume offset:646 Expected:2 Got:4
61870 Shift 0, v:4 sv:4 min:2 cur:2
61871 Out of Range Block::1 bo:135 Volume offset:647 Expected:2 Got:4
61872 Shift 0, v:4 sv:4 min:2 cur:2
61873 Out of Range Block::1 bo:136 Volume offset:648 Expected:2 Got:4
61874 Shift 0, v:4 sv:4 min:2 cur:2
61875 Out of Range Block::1 bo:137 Volume offset:649 Expected:2 Got:4
61876 Shift 0, v:4 sv:4 min:2 cur:2
61877 Out of Range Block::1 bo:138 Volume offset:650 Expected:2 Got:4
61878 Shift 0, v:4 sv:4 min:2 cur:2
61879 Out of Range Block::1 bo:139 Volume offset:651 Expected:2 Got:4
61880 Shift 0, v:4 sv:4 min:2 cur:2
61881 Out of Range Block::1 bo:140 Volume offset:652 Expected:2 Got:4
61882 Shift 0, v:4 sv:4 min:2 cur:2
61883 Out of Range Block::1 bo:141 Volume offset:653 Expected:2 Got:4
61884 Shift 0, v:4 sv:4 min:2 cur:2
61885 Out of Range Block::1 bo:142 Volume offset:654 Expected:2 Got:4
61886 Shift 0, v:4 sv:4 min:2 cur:2
61887 Out of Range Block::1 bo:143 Volume offset:655 Expected:2 Got:4
61888 Shift 0, v:4 sv:4 min:2 cur:2
61889 Out of Range Block::1 bo:144 Volume offset:656 Expected:2 Got:4
61890 Shift 0, v:4 sv:4 min:2 cur:2
61891 Out of Range Block::1 bo:145 Volume offset:657 Expected:2 Got:4
61892 Shift 0, v:4 sv:4 min:2 cur:2
61893 Out of Range Block::1 bo:146 Volume offset:658 Expected:2 Got:4
61894 Shift 0, v:4 sv:4 min:2 cur:2
61895 Out of Range Block::1 bo:147 Volume offset:659 Expected:2 Got:4
61896 Shift 0, v:4 sv:4 min:2 cur:2
61897 Out of Range Block::1 bo:148 Volume offset:660 Expected:2 Got:4
61898 Shift 0, v:4 sv:4 min:2 cur:2
61899 Out of Range Block::1 bo:149 Volume offset:661 Expected:2 Got:4
61900 Shift 0, v:4 sv:4 min:2 cur:2
61901 Out of Range Block::1 bo:150 Volume offset:662 Expected:2 Got:4
61902 Shift 0, v:4 sv:4 min:2 cur:2
61903 Out of Range Block::1 bo:151 Volume offset:663 Expected:2 Got:4
61904 Shift 0, v:4 sv:4 min:2 cur:2
61905 Out of Range Block::1 bo:152 Volume offset:664 Expected:2 Got:4
61906 Shift 0, v:4 sv:4 min:2 cur:2
61907 Out of Range Block::1 bo:153 Volume offset:665 Expected:2 Got:4
61908 Shift 0, v:4 sv:4 min:2 cur:2
61909 Out of Range Block::1 bo:154 Volume offset:666 Expected:2 Got:4
61910 Shift 0, v:4 sv:4 min:2 cur:2
61911 Out of Range Block::1 bo:155 Volume offset:667 Expected:2 Got:4
61912 Shift 0, v:4 sv:4 min:2 cur:2
61913 test test::test_wl_update_commit_rollover ... Out of Range Block::1 bo:156 Volume offset:668 Expected:2 Got:4
61914 Shift 0, v:4 sv:4 min:2 cur:2
61915 Out of Range Block::1 bo:157 Volume offset:669 Expected:2 Got:4
61916 Shift 0, v:4 sv:4 min:2 cur:2
61917 Out of Range Block::1 bo:158 Volume offset:670 Expected:2 Got:4
61918 Shift 0, v:4 sv:4 min:2 cur:2
61919 Out of Range Block::1 bo:159 Volume offset:671 Expected:2 Got:4
61920 Shift 0, v:4 sv:4 min:2 cur:2
61921 Out of Range Block::1 bo:160 Volume offset:672 Expected:2 Got:4
61922 Shift 0, v:4 sv:4 min:2 cur:2
61923 Out of Range Block::1 bo:161 Volume offset:673 Expected:2 Got:4
61924 Shift 0, v:4 sv:4 min:2 cur:2
61925 Out of Range Block::1 bo:162 Volume offset:674 Expected:2 Got:4
61926 Shift 0, v:4 sv:4 min:2 cur:2
61927 Out of Range Block::1 bo:163 Volume offset:675 Expected:2 Got:4
61928 Shift 0, v:4 sv:4 min:2 cur:2
61929 Out of Range Block::1 bo:164 Volume offset:676 Expected:2 Got:4
61930 Shift 0, v:4 sv:4 min:2 cur:2
61931 Out of Range Block::1 bo:165 Volume offset:677 Expected:2 Got:4
61932 Shift 0, v:4 sv:4 min:2 cur:2
61933 Out of Range Block::1 bo:166 Volume offset:678 Expected:2 Got:4
61934 Shift 0, v:4 sv:4 min:2 cur:2
61935 Out of Range Block::1 bo:167 Volume offset:679 Expected:2 Got:4
61936 Shift 0, v:4 sv:4 min:2 cur:2
61937 Out of Range Block::1 bo:168 Volume offset:680 Expected:2 Got:4
61938 Shift 0, v:4 sv:4 min:2 cur:2
61939 Out of Range Block::1 bo:169 Volume offset:681 Expected:2 Got:4
61940 Shift 0, v:4 sv:4 min:2 cur:2
61941 Out of Range Block::1 bo:170 Volume offset:682 Expected:2 Got:4
61942 Shift 0, v:4 sv:4 min:2 cur:2
61943 Out of Range Block::1 bo:171 Volume offset:683 Expected:2 Got:4
61944 Shift 0, v:4 sv:4 min:2 cur:2
61945 Out of Range Block::1 bo:172 Volume offset:684 Expected:2 Got:4
61946 Shift 0, v:4 sv:4 min:2 cur:2
61947 Out of Range Block::1 bo:173 Volume offset:685 Expected:2 Got:4
61948 okShift 0, v:4 sv:4 min:2 cur:2
61949 Out of Range Block::1 bo:174 Volume offset:686 Expected:2 Got:4
61950 Shift 0, v:4 sv:4 min:2 cur:2
61951 Out of Range Block::1 bo:175 Volume offset:687 Expected:2 Got:4
61952 Shift 0, v:4 sv:4 min:2 cur:2
61953 Out of Range Block::1 bo:176 Volume offset:688 Expected:2 Got:4
61954 Shift 0, v:4 sv:4 min:2 cur:2
61955 Out of Range Block::1 bo:177 Volume offset:689 Expected:2 Got:4
61956 Shift 0, v:4 sv:4 min:2 cur:2
61957 Out of Range Block::1 bo:178 Volume offset:690 Expected:2 Got:4
61958 Shift 0, v:4 sv:4 min:2 cur:2
61959 Out of Range Block::1 bo:179 Volume offset:691 Expected:2 Got:4
61960 Shift 0, v:4 sv:4 min:2 cur:2
61961 Out of Range Block::1 bo:180 Volume offset:692 Expected:2 Got:4
61962 Shift 0, v:4 sv:4 min:2 cur:2
61963 Out of Range Block::1 bo:181 Volume offset:693 Expected:2 Got:4
61964 Shift 0, v:4 sv:4 min:2 cur:2
61965 Out of Range Block::1 bo:182 Volume offset:694 Expected:2 Got:4
61966 Shift 0, v:4 sv:4 min:2 cur:2
61967 Out of Range Block::1 bo:183 Volume offset:695 Expected:2 Got:4
61968 Shift 0, v:4 sv:4 min:2 cur:2
61969 Out of Range Block::1 bo:184 Volume offset:696 Expected:2 Got:4
61970 Shift 0, v:4 sv:4 min:2 cur:2
61971 Out of Range Block::1 bo:185 Volume offset:697 Expected:2 Got:4
61972 Shift 0, v:4 sv:4 min:2 cur:2
61973 Out of Range Block::1 bo:186 Volume offset:698 Expected:2 Got:4
61974 Shift 0, v:4 sv:4 min:2 cur:2
61975 Out of Range Block::1 bo:187 Volume offset:699 Expected:2 Got:4
61976 Shift 0, v:4 sv:4 min:2 cur:2
61977 Out of Range Block::1 bo:188 Volume offset:700 Expected:2 Got:4
61978 Shift 0, v:4 sv:4 min:2 cur:2
61979 Out of Range Block::1 bo:189 Volume offset:701 Expected:2 Got:4
61980 Shift 0, v:4 sv:4 min:2 cur:2
61981 Out of Range Block::1 bo:190 Volume offset:702 Expected:2 Got:4
61982 Shift 0, v:4 sv:4 min:2 cur:2
61983 Out of Range Block::1 bo:191 Volume offset:703 Expected:2 Got:4
61984 Shift 0, v:4 sv:4 min:2 cur:2
61985 Out of Range Block::1 bo:192 Volume offset:704 Expected:2 Got:4
61986 Shift 0, v:4 sv:4 min:2 cur:2
61987 Out of Range Block::1 bo:193 Volume offset:705 Expected:2 Got:4
61988 Shift 0, v:4 sv:4 min:2 cur:2
61989 Out of Range Block::1 bo:194 Volume offset:706 Expected:2 Got:4
61990 Shift 0, v:4 sv:4 min:2 cur:2
61991 Out of Range Block::1 bo:195 Volume offset:707 Expected:2 Got:4
61992 Shift 0, v:4 sv:4 min:2 cur:2
61993 Out of Range Block::1 bo:196 Volume offset:708 Expected:2 Got:4
61994 Shift 0, v:4 sv:4 min:2 cur:2
61995 Out of Range Block::1 bo:197 Volume offset:709 Expected:2 Got:4
61996 Shift 0, v:4 sv:4 min:2 cur:2
61997 Out of Range Block::1 bo:198 Volume offset:710 Expected:2 Got:4
61998 Shift 0, v:4 sv:4 min:2 cur:2
61999 Out of Range Block::1 bo:199 Volume offset:711 Expected:2 Got:4
62000 Shift 0, v:4 sv:4 min:2 cur:2
62001 Out of Range Block::1 bo:200 Volume offset:712 Expected:2 Got:4
62002 Shift 0, v:4 sv:4 min:2 cur:2
62003 Out of Range Block::1 bo:201 Volume offset:713 Expected:2 Got:4
62004 Shift 0, v:4 sv:4 min:2 cur:2
62005 Out of Range Block::1 bo:202 Volume offset:714 Expected:2 Got:4
62006 Shift 0, v:4 sv:4 min:2 cur:2
62007 Out of Range Block::1 bo:203 Volume offset:715 Expected:2 Got:4
62008 Shift 0, v:4 sv:4 min:2 cur:2
62009 Out of Range Block::1 bo:204 Volume offset:716 Expected:2 Got:4
62010 Shift 0, v:4 sv:4 min:2 cur:2
62011 Out of Range Block::1 bo:205 Volume offset:717 Expected:2 Got:4
62012 Shift 0, v:4 sv:4 min:2 cur:2
62013 Out of Range Block::1 bo:206 Volume offset:718 Expected:2 Got:4
62014 Shift 0, v:4 sv:4 min:2 cur:2
62015 Out of Range Block::1 bo:207 Volume offset:719 Expected:2 Got:4
62016 Shift 0, v:4 sv:4 min:2 cur:2
62017 Out of Range Block::1 bo:208 Volume offset:720 Expected:2 Got:4
62018 Shift 0, v:4 sv:4 min:2 cur:2
62019 
62020 Out of Range Block::1 bo:209 Volume offset:721 Expected:2 Got:4
62021 Shift 0, v:4 sv:4 min:2 cur:2
62022 Out of Range Block::1 bo:210 Volume offset:722 Expected:2 Got:4
62023 Shift 0, v:4 sv:4 min:2 cur:2
62024 Out of Range Block::1 bo:211 Volume offset:723 Expected:2 Got:4
62025 Shift 0, v:4 sv:4 min:2 cur:2
62026 Out of Range Block::1 bo:212 Volume offset:724 Expected:2 Got:4
62027 Shift 0, v:4 sv:4 min:2 cur:2
62028 Out of Range Block::1 bo:213 Volume offset:725 Expected:2 Got:4
62029 Shift 0, v:4 sv:4 min:2 cur:2
62030 Out of Range Block::1 bo:214 Volume offset:726 Expected:2 Got:4
62031 Shift 0, v:4 sv:4 min:2 cur:2
62032 Out of Range Block::1 bo:215 Volume offset:727 Expected:2 Got:4
62033 Shift 0, v:4 sv:4 min:2 cur:2
62034 Out of Range Block::1 bo:216 Volume offset:728 Expected:2 Got:4
62035 Shift 0, v:4 sv:4 min:2 cur:2
62036 Out of Range Block::1 bo:217 Volume offset:729 Expected:2 Got:4
62037 Shift 0, v:4 sv:4 min:2 cur:2
62038 Out of Range Block::1 bo:218 Volume offset:730 Expected:2 Got:4
62039 Shift 0, v:4 sv:4 min:2 cur:2
62040 Out of Range Block::1 bo:219 Volume offset:731 Expected:2 Got:4
62041 Shift 0, v:4 sv:4 min:2 cur:2
62042 Out of Range Block::1 bo:220 Volume offset:732 Expected:2 Got:4
62043 Shift 0, v:4 sv:4 min:2 cur:2
62044 Out of Range Block::1 bo:221 Volume offset:733 Expected:2 Got:4
62045 Shift 0, v:4 sv:4 min:2 cur:2
62046 Out of Range Block::1 bo:222 Volume offset:734 Expected:2 Got:4
62047 Shift 0, v:4 sv:4 min:2 cur:2
62048 Out of Range Block::1 bo:223 Volume offset:735 Expected:2 Got:4
62049 Shift 0, v:4 sv:4 min:2 cur:2
62050 Out of Range Block::1 bo:224 Volume offset:736 Expected:2 Got:4
62051 Shift 0, v:4 sv:4 min:2 cur:2
62052 Out of Range Block::1 bo:225 Volume offset:737 Expected:2 Got:4
62053 Shift 0, v:4 sv:4 min:2 cur:2
62054 Out of Range Block::1 bo:226 Volume offset:738 Expected:2 Got:4
62055 Shift 0, v:4 sv:4 min:2 cur:2
62056 Out of Range Block::1 bo:227 Volume offset:739 Expected:2 Got:4
62057 Shift 0, v:4 sv:4 min:2 cur:2
62058 Out of Range Block::1 bo:228 Volume offset:740 Expected:2 Got:4
62059 Shift 0, v:4 sv:4 min:2 cur:2
62060 Out of Range Block::1 bo:229 Volume offset:741 Expected:2 Got:4
62061 Shift 0, v:4 sv:4 min:2 cur:2
62062 Out of Range Block::1 bo:230 Volume offset:742 Expected:2 Got:4
62063 Shift 0, v:4 sv:4 min:2 cur:2
62064 Out of Range Block::1 bo:231 Volume offset:743 Expected:2 Got:4
62065 Shift 0, v:4 sv:4 min:2 cur:2
62066 Out of Range Block::1 bo:232 Volume offset:744 Expected:2 Got:4
62067 Shift 0, v:4 sv:4 min:2 cur:2
62068 Out of Range Block::1 bo:233 Volume offset:745 Expected:2 Got:4
62069 Shift 0, v:4 sv:4 min:2 cur:2
62070 Out of Range Block::1 bo:234 Volume offset:746 Expected:2 Got:4
62071 Shift 0, v:4 sv:4 min:2 cur:2
62072 Out of Range Block::1 bo:235 Volume offset:747 Expected:2 Got:4
62073 Shift 0, v:4 sv:4 min:2 cur:2
62074 Out of Range Block::1 bo:236 Volume offset:748 Expected:2 Got:4
62075 Shift 0, v:4 sv:4 min:2 cur:2
62076 Out of Range Block::1 bo:237 Volume offset:749 Expected:2 Got:4
62077 Shift 0, v:4 sv:4 min:2 cur:2
62078 Out of Range Block::1 bo:238 Volume offset:750 Expected:2 Got:4
62079 Shift 0, v:4 sv:4 min:2 cur:2
62080 Out of Range Block::1 bo:239 Volume offset:751 Expected:2 Got:4
62081 Shift 0, v:4 sv:4 min:2 cur:2
62082 Out of Range Block::1 bo:240 Volume offset:752 Expected:2 Got:4
62083 Shift 0, v:4 sv:4 min:2 cur:2
62084 Out of Range Block::1 bo:241 Volume offset:753 Expected:2 Got:4
62085 Shift 0, v:4 sv:4 min:2 cur:2
62086 Out of Range Block::1 bo:242 Volume offset:754 Expected:2 Got:4
62087 Shift 0, v:4 sv:4 min:2 cur:2
62088 Out of Range Block::1 bo:243 Volume offset:755 Expected:2 Got:4
62089 Shift 0, v:4 sv:4 min:2 cur:2
62090 Out of Range Block::1 bo:244 Volume offset:756 Expected:2 Got:4
62091 Shift 0, v:4 sv:4 min:2 cur:2
62092 Out of Range Block::1 bo:245 Volume offset:757 Expected:2 Got:4
62093 Shift 0, v:4 sv:4 min:2 cur:2
62094 Out of Range Block::1 bo:246 Volume offset:758 Expected:2 Got:4
62095 Shift 0, v:4 sv:4 min:2 cur:2
62096 Out of Range Block::1 bo:247 Volume offset:759 Expected:2 Got:4
62097 Shift 0, v:4 sv:4 min:2 cur:2
62098 Out of Range Block::1 bo:248 Volume offset:760 Expected:2 Got:4
62099 Shift 0, v:4 sv:4 min:2 cur:2
62100 Out of Range Block::1 bo:249 Volume offset:761 Expected:2 Got:4
62101 Shift 0, v:4 sv:4 min:2 cur:2
62102 Out of Range Block::1 bo:250 Volume offset:762 Expected:2 Got:4
62103 Shift 0, v:4 sv:4 min:2 cur:2
62104 Out of Range Block::1 bo:251 Volume offset:763 Expected:2 Got:4
62105 Shift 0, v:4 sv:4 min:2 cur:2
62106 Out of Range Block::1 bo:252 Volume offset:764 Expected:2 Got:4
62107 Shift 0, v:4 sv:4 min:2 cur:2
62108 Out of Range Block::1 bo:253 Volume offset:765 Expected:2 Got:4
62109 Shift 0, v:4 sv:4 min:2 cur:2
62110 Out of Range Block::1 bo:254 Volume offset:766 Expected:2 Got:4
62111 Shift 0, v:4 sv:4 min:2 cur:2
62112 Out of Range Block::1 bo:255 Volume offset:767 Expected:2 Got:4
62113 Shift 0, v:4 sv:4 min:2 cur:2
62114 Out of Range Block::1 bo:256 Volume offset:768 Expected:2 Got:4
62115 Shift 0, v:4 sv:4 min:2 cur:2
62116 Out of Range Block::1 bo:257 Volume offset:769 Expected:2 Got:4
62117 Shift 0, v:4 sv:4 min:2 cur:2
62118 Out of Range Block::1 bo:258 Volume offset:770 Expected:2 Got:4
62119 Shift 0, v:4 sv:4 min:2 cur:2
62120 Out of Range Block::1 bo:259 Volume offset:771 Expected:2 Got:4
62121 Shift 0, v:4 sv:4 min:2 cur:2
62122 test test::test_wl_update_rollover ... Out of Range Block::1 bo:260 Volume offset:772 Expected:2 Got:4
62123 Shift 0, v:4 sv:4 min:2 cur:2
62124 Out of Range Block::1 bo:261 Volume offset:773 Expected:2 Got:4
62125 Shift 0, v:4 sv:4 min:2 cur:2
62126 Out of Range Block::1 bo:262 Volume offset:774 Expected:2 Got:4
62127 Shift 0, v:4 sv:4 min:2 cur:2
62128 Out of Range Block::1 bo:263 Volume offset:775 Expected:2 Got:4
62129 Shift 0, v:4 sv:4 min:2 cur:2
62130 Out of Range Block::1 bo:264 Volume offset:776 Expected:2 Got:4
62131 Shift 0, v:4 sv:4 min:2 cur:2
62132 Out of Range Block::1 bo:265 Volume offset:777 Expected:2 Got:4
62133 Shift 0, v:4 sv:4 min:2 cur:2
62134 Out of Range Block::1 bo:266 Volume offset:778 Expected:2 Got:4
62135 Shift 0, v:4 sv:4 min:2 cur:2
62136 Out of Range Block::1 bo:267 Volume offset:779 Expected:2 Got:4
62137 Shift 0, v:4 sv:4 min:2 cur:2
62138 Out of Range Block::1 bo:268 Volume offset:780 Expected:2 Got:4
62139 Shift 0, v:4 sv:4 min:2 cur:2
62140 Out of Range Block::1 bo:269 Volume offset:781 Expected:2 Got:4
62141 Shift 0, v:4 sv:4 min:2 cur:2
62142 Out of Range Block::1 bo:270 Volume offset:782 Expected:2 Got:4
62143 Shift 0, v:4 sv:4 min:2 cur:2
62144 Out of Range Block::1 bo:271 Volume offset:783 Expected:2 Got:4
62145 Shift 0, v:4 sv:4 min:2 cur:2
62146 Out of Range Block::1 bo:272 Volume offset:784 Expected:2 Got:4
62147 Shift 0, v:4 sv:4 min:2 cur:2
62148 Out of Range Block::1 bo:273 Volume offset:785 Expected:2 Got:4
62149 Shift 0, v:4 sv:4 min:2 cur:2
62150 Out of Range Block::1 bo:274 Volume offset:786 Expected:2 Got:4
62151 Shift 0, v:4 sv:4 min:2 cur:2
62152 Out of Range Block::1 bo:275 Volume offset:787 Expected:2 Got:4
62153 Shift 0, v:4 sv:4 min:2 cur:2
62154 Out of Range Block::1 bo:276 Volume offset:788 Expected:2 Got:4
62155 Shift 0, v:4 sv:4 min:2 cur:2
62156 Out of Range Block::1 bo:277 Volume offset:789 Expected:2 Got:4
62157 Shift 0, v:4 sv:4 min:2 cur:2
62158 Out of Range Block::1 bo:278 Volume offset:790 Expected:2 Got:4
62159 Shift 0, v:4 sv:4 min:2 cur:2
62160 Out of Range Block::1 bo:279 Volume offset:791 Expected:2 Got:4
62161 Shift 0, v:4 sv:4 min:2 cur:2
62162 Out of Range Block::1 bo:280 Volume offset:792 Expected:2 Got:4
62163 Shift 0, v:4 sv:4 min:2 cur:2
62164 Out of Range Block::1 bo:281 Volume offset:793 Expected:2 Got:4
62165 Shift 0, v:4 sv:4 min:2 cur:2
62166 Out of Range Block::1 bo:282 Volume offset:794 Expected:2 Got:4
62167 Shift 0, v:4 sv:4 min:2 cur:2
62168 Out of Range Block::1 bo:283 Volume offset:795 Expected:2 Got:4
62169 Shift 0, v:4 sv:4 min:2 cur:2
62170 Out of Range Block::1 bo:284 Volume offset:796 Expected:2 Got:4
62171 Shift 0, v:4 sv:4 min:2 cur:2
62172 Out of Range Block::1 bo:285 Volume offset:797 Expected:2 Got:4
62173 Shift 0, v:4 sv:4 min:2 cur:2
62174 Out of Range Block::1 bo:286 Volume offset:798 Expected:2 Got:4
62175 Shift 0, v:4 sv:4 min:2 cur:2
62176 Out of Range Block::1 bo:287 Volume offset:799 Expected:2 Got:4
62177 Shift 0, v:4 sv:4 min:2 cur:2
62178 Out of Range Block::1 bo:288 Volume offset:800 Expected:2 Got:4
62179 Shift 0, v:4 sv:4 min:2 cur:2
62180 Out of Range Block::1 bo:289 Volume offset:801 Expected:2 Got:4
62181 Shift 0, v:4 sv:4 min:2 cur:2
62182 Out of Range Block::1 bo:290 Volume offset:802 Expected:2 Got:4
62183 Shift 0, v:4 sv:4 min:2 cur:2
62184 Out of Range Block::1 bo:291 Volume offset:803 Expected:2 Got:4
62185 Shift 0, v:4 sv:4 min:2 cur:2
62186 Out of Range Block::1 bo:292 Volume offset:804 Expected:2 Got:4
62187 Shift 0, v:4 sv:4 min:2 cur:2
62188 Out of Range Block::1 bo:293 Volume offset:805 Expected:2 Got:4
62189 Shift 0, v:4 sv:4 min:2 cur:2
62190 Out of Range Block::1 bo:294 Volume offset:806 Expected:2 Got:4
62191 Shift 0, v:4 sv:4 min:2 cur:2
62192 Out of Range Block::1 bo:295 Volume offset:807 Expected:2 Got:4
62193 Shift 0, v:4 sv:4 min:2 cur:2
62194 Out of Range Block::1 bo:296 Volume offset:808 Expected:2 Got:4
62195 Shift 0, v:4 sv:4 min:2 cur:2
62196 Out of Range Block::1 bo:297 Volume offset:809 Expected:2 Got:4
62197 Shift 0, v:4 sv:4 min:2 cur:2
62198 Out of Range Block::1 bo:298 Volume offset:810 Expected:2 Got:4
62199 Shift 0, v:4 sv:4 min:2 cur:2
62200 Out of Range Block::1 bo:299 Volume offset:811 Expected:2 Got:4
62201 Shift 0, v:4 sv:4 min:2 cur:2
62202 Out of Range Block::1 bo:300 Volume offset:812 Expected:2 Got:4
62203 Shift 0, v:4 sv:4 min:2 cur:2
62204 Out of Range Block::1 bo:301 Volume offset:813 Expected:2 Got:4
62205 Shift 0, v:4 sv:4 min:2 cur:2
62206 Out of Range Block::1 bo:302 Volume offset:814 Expected:2 Got:4
62207 Shift 0, v:4 sv:4 min:2 cur:2
62208 Out of Range Block::1 bo:303 Volume offset:815 Expected:2 Got:4
62209 Shift 0, v:4 sv:4 min:2 cur:2
62210 Out of Range Block::1 bo:304 Volume offset:816 Expected:2 Got:4
62211 Shift 0, v:4 sv:4 min:2 cur:2
62212 Out of Range Block::1 bo:305 Volume offset:817 Expected:2 Got:4
62213 Shift 0, v:4 sv:4 min:2 cur:2
62214 Out of Range Block::1 bo:306 Volume offset:818 Expected:2 Got:4
62215 Shift 0, v:4 sv:4 min:2 cur:2
62216 Out of Range Block::1 bo:307 Volume offset:819 Expected:2 Got:4
62217 Shift 0, v:4 sv:4 min:2 cur:2
62218 Out of Range Block::1 bo:308 Volume offset:820 Expected:2 Got:4
62219 Shift 0, v:4 sv:4 min:2 cur:2
62220 Out of Range Block::1 bo:309 Volume offset:821 Expected:2 Got:4
62221 Shift 0, v:4 sv:4 min:2 cur:2
62222 Out of Range Block::1 bo:310 Volume offset:822 Expected:2 Got:4
62223 okShift 0, v:4 sv:4 min:2 cur:2
62224 Out of Range Block::1 bo:311 Volume offset:823 Expected:2 Got:4
62225 Shift 0, v:4 sv:4 min:2 cur:2
62226 Out of Range Block::1 bo:312 Volume offset:824 Expected:2 Got:4
62227 Shift 0, v:4 sv:4 min:2 cur:2
62228 Out of Range Block::1 bo:313 Volume offset:825 Expected:2 Got:4
62229 Shift 0, v:4 sv:4 min:2 cur:2
62230 Out of Range Block::1 bo:314 Volume offset:826 Expected:2 Got:4
62231 Shift 0, v:4 sv:4 min:2 cur:2
62232 Out of Range Block::1 bo:315 Volume offset:827 Expected:2 Got:4
62233 Shift 0, v:4 sv:4 min:2 cur:2
62234 Out of Range Block::1 bo:316 Volume offset:828 Expected:2 Got:4
62235 Shift 0, v:4 sv:4 min:2 cur:2
62236 Out of Range Block::1 bo:317 Volume offset:829 Expected:2 Got:4
62237 Shift 0, v:4 sv:4 min:2 cur:2
62238 Out of Range Block::1 bo:318 Volume offset:830 Expected:2 Got:4
62239 Shift 0, v:4 sv:4 min:2 cur:2
62240 Out of Range Block::1 bo:319 Volume offset:831 Expected:2 Got:4
62241 Shift 0, v:4 sv:4 min:2 cur:2
62242 Out of Range Block::1 bo:320 Volume offset:832 Expected:2 Got:4
62243 Shift 0, v:4 sv:4 min:2 cur:2
62244 Out of Range Block::1 bo:321 Volume offset:833 Expected:2 Got:4
62245 Shift 0, v:4 sv:4 min:2 cur:2
62246 Out of Range Block::1 bo:322 Volume offset:834 Expected:2 Got:4
62247 Shift 0, v:4 sv:4 min:2 cur:2
62248 Out of Range Block::1 bo:323 Volume offset:835 Expected:2 Got:4
62249 Shift 0, v:4 sv:4 min:2 cur:2
62250 Out of Range Block::1 bo:324 Volume offset:836 Expected:2 Got:4
62251 Shift 0, v:4 sv:4 min:2 cur:2
62252 Out of Range Block::1 bo:325 Volume offset:837 Expected:2 Got:4
62253 Shift 0, v:4 sv:4 min:2 cur:2
62254 Out of Range Block::1 bo:326 Volume offset:838 Expected:2 Got:4
62255 Shift 0, v:4 sv:4 min:2 cur:2
62256 Out of Range Block::1 bo:327 Volume offset:839 Expected:2 Got:4
62257 Shift 0, v:4 sv:4 min:2 cur:2
62258 Out of Range Block::1 bo:328 Volume offset:840 Expected:2 Got:4
62259 Shift 0, v:4 sv:4 min:2 cur:2
62260 Out of Range Block::1 bo:329 Volume offset:841 Expected:2 Got:4
62261 Shift 0, v:4 sv:4 min:2 cur:2
62262 Out of Range Block::1 bo:330 Volume offset:842 Expected:2 Got:4
62263 Shift 0, v:4 sv:4 min:2 cur:2
62264 Out of Range Block::1 bo:331 Volume offset:843 Expected:2 Got:4
62265 Shift 0, v:4 sv:4 min:2 cur:2
62266 Out of Range Block::1 bo:332 Volume offset:844 Expected:2 Got:4
62267 Shift 0, v:4 sv:4 min:2 cur:2
62268 Out of Range Block::1 bo:333 Volume offset:845 Expected:2 Got:4
62269 Shift 0, v:4 sv:4 min:2 cur:2
62270 Out of Range Block::1 bo:334 Volume offset:846 Expected:2 Got:4
62271 Shift 0, v:4 sv:4 min:2 cur:2
62272 Out of Range Block::1 bo:335 Volume offset:847 Expected:2 Got:4
62273 Shift 0, v:4 sv:4 min:2 cur:2
62274 Out of Range Block::1 bo:336 Volume offset:848 Expected:2 Got:4
62275 Shift 0, v:4 sv:4 min:2 cur:2
62276 Out of Range Block::1 bo:337 Volume offset:849 Expected:2 Got:4
62277 Shift 0, v:4 sv:4 min:2 cur:2
62278 Out of Range Block::1 bo:338 Volume offset:850 Expected:2 Got:4
62279 Shift 0, v:4 sv:4 min:2 cur:2
62280 Out of Range Block::1 bo:339 Volume offset:851 Expected:2 Got:4
62281 Shift 0, v:4 sv:4 min:2 cur:2
62282 Out of Range Block::1 bo:340 Volume offset:852 Expected:2 Got:4
62283 Shift 0, v:4 sv:4 min:2 cur:2
62284 Out of Range Block::1 bo:341 Volume offset:853 Expected:2 Got:4
62285 Shift 0, v:4 sv:4 min:2 cur:2
62286 
62287 Out of Range Block::1 bo:342 Volume offset:854 Expected:2 Got:4
62288 Shift 0, v:4 sv:4 min:2 cur:2
62289 Out of Range Block::1 bo:343 Volume offset:855 Expected:2 Got:4
62290 Shift 0, v:4 sv:4 min:2 cur:2
62291 Out of Range Block::1 bo:344 Volume offset:856 Expected:2 Got:4
62292 Shift 0, v:4 sv:4 min:2 cur:2
62293 Out of Range Block::1 bo:345 Volume offset:857 Expected:2 Got:4
62294 Shift 0, v:4 sv:4 min:2 cur:2
62295 Out of Range Block::1 bo:346 Volume offset:858 Expected:2 Got:4
62296 Shift 0, v:4 sv:4 min:2 cur:2
62297 Out of Range Block::1 bo:347 Volume offset:859 Expected:2 Got:4
62298 Shift 0, v:4 sv:4 min:2 cur:2
62299 Out of Range Block::1 bo:348 Volume offset:860 Expected:2 Got:4
62300 Shift 0, v:4 sv:4 min:2 cur:2
62301 Out of Range Block::1 bo:349 Volume offset:861 Expected:2 Got:4
62302 Shift 0, v:4 sv:4 min:2 cur:2
62303 Out of Range Block::1 bo:350 Volume offset:862 Expected:2 Got:4
62304 Shift 0, v:4 sv:4 min:2 cur:2
62305 Out of Range Block::1 bo:351 Volume offset:863 Expected:2 Got:4
62306 Shift 0, v:4 sv:4 min:2 cur:2
62307 Out of Range Block::1 bo:352 Volume offset:864 Expected:2 Got:4
62308 Shift 0, v:4 sv:4 min:2 cur:2
62309 Out of Range Block::1 bo:353 Volume offset:865 Expected:2 Got:4
62310 Shift 0, v:4 sv:4 min:2 cur:2
62311 Out of Range Block::1 bo:354 Volume offset:866 Expected:2 Got:4
62312 Shift 0, v:4 sv:4 min:2 cur:2
62313 Out of Range Block::1 bo:355 Volume offset:867 Expected:2 Got:4
62314 Shift 0, v:4 sv:4 min:2 cur:2
62315 Out of Range Block::1 bo:356 Volume offset:868 Expected:2 Got:4
62316 Shift 0, v:4 sv:4 min:2 cur:2
62317 Out of Range Block::1 bo:357 Volume offset:869 Expected:2 Got:4
62318 Shift 0, v:4 sv:4 min:2 cur:2
62319 Out of Range Block::1 bo:358 Volume offset:870 Expected:2 Got:4
62320 Shift 0, v:4 sv:4 min:2 cur:2
62321 Out of Range Block::1 bo:359 Volume offset:871 Expected:2 Got:4
62322 Shift 0, v:4 sv:4 min:2 cur:2
62323 Out of Range Block::1 bo:360 Volume offset:872 Expected:2 Got:4
62324 Shift 0, v:4 sv:4 min:2 cur:2
62325 Out of Range Block::1 bo:361 Volume offset:873 Expected:2 Got:4
62326 Shift 0, v:4 sv:4 min:2 cur:2
62327 Out of Range Block::1 bo:362 Volume offset:874 Expected:2 Got:4
62328 Shift 0, v:4 sv:4 min:2 cur:2
62329 Out of Range Block::1 bo:363 Volume offset:875 Expected:2 Got:4
62330 Shift 0, v:4 sv:4 min:2 cur:2
62331 Out of Range Block::1 bo:364 Volume offset:876 Expected:2 Got:4
62332 Shift 0, v:4 sv:4 min:2 cur:2
62333 Out of Range Block::1 bo:365 Volume offset:877 Expected:2 Got:4
62334 Shift 0, v:4 sv:4 min:2 cur:2
62335 Out of Range Block::1 bo:366 Volume offset:878 Expected:2 Got:4
62336 Shift 0, v:4 sv:4 min:2 cur:2
62337 Out of Range Block::1 bo:367 Volume offset:879 Expected:2 Got:4
62338 Shift 0, v:4 sv:4 min:2 cur:2
62339 Out of Range Block::1 bo:368 Volume offset:880 Expected:2 Got:4
62340 Shift 0, v:4 sv:4 min:2 cur:2
62341 Out of Range Block::1 bo:369 Volume offset:881 Expected:2 Got:4
62342 Shift 0, v:4 sv:4 min:2 cur:2
62343 Out of Range Block::1 bo:370 Volume offset:882 Expected:2 Got:4
62344 Shift 0, v:4 sv:4 min:2 cur:2
62345 Out of Range Block::1 bo:371 Volume offset:883 Expected:2 Got:4
62346 Shift 0, v:4 sv:4 min:2 cur:2
62347 Out of Range Block::1 bo:372 Volume offset:884 Expected:2 Got:4
62348 Shift 0, v:4 sv:4 min:2 cur:2
62349 Out of Range Block::1 bo:373 Volume offset:885 Expected:2 Got:4
62350 Shift 0, v:4 sv:4 min:2 cur:2
62351 Out of Range Block::1 bo:374 Volume offset:886 Expected:2 Got:4
62352 Shift 0, v:4 sv:4 min:2 cur:2
62353 Out of Range Block::1 bo:375 Volume offset:887 Expected:2 Got:4
62354 Shift 0, v:4 sv:4 min:2 cur:2
62355 Out of Range Block::1 bo:376 Volume offset:888 Expected:2 Got:4
62356 Shift 0, v:4 sv:4 min:2 cur:2
62357 Out of Range Block::1 bo:377 Volume offset:889 Expected:2 Got:4
62358 Shift 0, v:4 sv:4 min:2 cur:2
62359 Out of Range Block::1 bo:378 Volume offset:890 Expected:2 Got:4
62360 Shift 0, v:4 sv:4 min:2 cur:2
62361 Out of Range Block::1 bo:379 Volume offset:891 Expected:2 Got:4
62362 Shift 0, v:4 sv:4 min:2 cur:2
62363 Out of Range Block::1 bo:380 Volume offset:892 Expected:2 Got:4
62364 Shift 0, v:4 sv:4 min:2 cur:2
62365 Out of Range Block::1 bo:381 Volume offset:893 Expected:2 Got:4
62366 Shift 0, v:4 sv:4 min:2 cur:2
62367 Out of Range Block::1 bo:382 Volume offset:894 Expected:2 Got:4
62368 Shift 0, v:4 sv:4 min:2 cur:2
62369 Out of Range Block::1 bo:383 Volume offset:895 Expected:2 Got:4
62370 Shift 0, v:4 sv:4 min:2 cur:2
62371 Out of Range Block::1 bo:384 Volume offset:896 Expected:2 Got:4
62372 Shift 0, v:4 sv:4 min:2 cur:2
62373 Out of Range Block::1 bo:385 Volume offset:897 Expected:2 Got:4
62374 Shift 0, v:4 sv:4 min:2 cur:2
62375 Out of Range Block::1 bo:386 Volume offset:898 Expected:2 Got:4
62376 Shift 0, v:4 sv:4 min:2 cur:2
62377 Out of Range Block::1 bo:387 Volume offset:899 Expected:2 Got:4
62378 Shift 0, v:4 sv:4 min:2 cur:2
62379 Out of Range Block::1 bo:388 Volume offset:900 Expected:2 Got:4
62380 Shift 0, v:4 sv:4 min:2 cur:2
62381 Out of Range Block::1 bo:389 Volume offset:901 Expected:2 Got:4
62382 Shift 0, v:4 sv:4 min:2 cur:2
62383 Out of Range Block::1 bo:390 Volume offset:902 Expected:2 Got:4
62384 Shift 0, v:4 sv:4 min:2 cur:2
62385 Out of Range Block::1 bo:391 Volume offset:903 Expected:2 Got:4
62386 Shift 0, v:4 sv:4 min:2 cur:2
62387 Out of Range Block::1 bo:392 Volume offset:904 Expected:2 Got:4
62388 Shift 0, v:4 sv:4 min:2 cur:2
62389 Out of Range Block::1 bo:393 Volume offset:905 Expected:2 Got:4
62390 Shift 0, v:4 sv:4 min:2 cur:2
62391 Out of Range Block::1 bo:394 Volume offset:906 Expected:2 Got:4
62392 Shift 0, v:4 sv:4 min:2 cur:2
62393 Out of Range Block::1 bo:395 Volume offset:907 Expected:2 Got:4
62394 Shift 0, v:4 sv:4 min:2 cur:2
62395 Out of Range Block::1 bo:396 Volume offset:908 Expected:2 Got:4
62396 Shift 0, v:4 sv:4 min:2 cur:2
62397 Out of Range Block::1 bo:397 Volume offset:909 Expected:2 Got:4
62398 Shift 0, v:4 sv:4 min:2 cur:2
62399 Out of Range Block::1 bo:398 Volume offset:910 Expected:2 Got:4
62400 Shift 0, v:4 sv:4 min:2 cur:2
62401 Out of Range Block::1 bo:399 Volume offset:911 Expected:2 Got:4
62402 Shift 0, v:4 sv:4 min:2 cur:2
62403 Out of Range Block::1 bo:400 Volume offset:912 Expected:2 Got:4
62404 Shift 0, v:4 sv:4 min:2 cur:2
62405 Out of Range Block::1 bo:401 Volume offset:913 Expected:2 Got:4
62406 Shift 0, v:4 sv:4 min:2 cur:2
62407 Out of Range Block::1 bo:402 Volume offset:914 Expected:2 Got:4
62408 Shift 0, v:4 sv:4 min:2 cur:2
62409 Out of Range Block::1 bo:403 Volume offset:915 Expected:2 Got:4
62410 Shift 0, v:4 sv:4 min:2 cur:2
62411 Out of Range Block::1 bo:404 Volume offset:916 Expected:2 Got:4
62412 Shift 0, v:4 sv:4 min:2 cur:2
62413 Out of Range Block::1 bo:405 Volume offset:917 Expected:2 Got:4
62414 Shift 0, v:4 sv:4 min:2 cur:2
62415 Out of Range Block::1 bo:406 Volume offset:918 Expected:2 Got:4
62416 Shift 0, v:4 sv:4 min:2 cur:2
62417 Out of Range Block::1 bo:407 Volume offset:919 Expected:2 Got:4
62418 Shift 0, v:4 sv:4 min:2 cur:2
62419 Out of Range Block::1 bo:408 Volume offset:920 Expected:2 Got:4
62420 Shift 0, v:4 sv:4 min:2 cur:2
62421 Out of Range Block::1 bo:409 Volume offset:921 Expected:2 Got:4
62422 Shift 0, v:4 sv:4 min:2 cur:2
62423 Out of Range Block::1 bo:410 Volume offset:922 Expected:2 Got:4
62424 Shift 0, v:4 sv:4 min:2 cur:2
62425 Out of Range Block::1 bo:411 Volume offset:923 Expected:2 Got:4
62426 Shift 0, v:4 sv:4 min:2 cur:2
62427 Out of Range Block::1 bo:412 Volume offset:924 Expected:2 Got:4
62428 Shift 0, v:4 sv:4 min:2 cur:2
62429 Out of Range Block::1 bo:413 Volume offset:925 Expected:2 Got:4
62430 Shift 0, v:4 sv:4 min:2 cur:2
62431 Out of Range Block::1 bo:414 Volume offset:926 Expected:2 Got:4
62432 Shift 0, v:4 sv:4 min:2 cur:2
62433 Out of Range Block::1 bo:415 Volume offset:927 Expected:2 Got:4
62434 Shift 0, v:4 sv:4 min:2 cur:2
62435 Out of Range Block::1 bo:416 Volume offset:928 Expected:2 Got:4
62436 Shift 0, v:4 sv:4 min:2 cur:2
62437 Out of Range Block::1 bo:417 Volume offset:929 Expected:2 Got:4
62438 Shift 0, v:4 sv:4 min:2 cur:2
62439 Out of Range Block::1 bo:418 Volume offset:930 Expected:2 Got:4
62440 Shift 0, v:4 sv:4 min:2 cur:2
62441 Out of Range Block::1 bo:419 Volume offset:931 Expected:2 Got:4
62442 Shift 0, v:4 sv:4 min:2 cur:2
62443 Out of Range Block::1 bo:420 Volume offset:932 Expected:2 Got:4
62444 Shift 0, v:4 sv:4 min:2 cur:2
62445 Out of Range Block::1 bo:421 Volume offset:933 Expected:2 Got:4
62446 Shift 0, v:4 sv:4 min:2 cur:2
62447 Out of Range Block::1 bo:422 Volume offset:934 Expected:2 Got:4
62448 Shift 0, v:4 sv:4 min:2 cur:2
62449 Out of Range Block::1 bo:423 Volume offset:935 Expected:2 Got:4
62450 Shift 0, v:4 sv:4 min:2 cur:2
62451 Out of Range Block::1 bo:424 Volume offset:936 Expected:2 Got:4
62452 Shift 0, v:4 sv:4 min:2 cur:2
62453 Out of Range Block::1 bo:425 Volume offset:937 Expected:2 Got:4
62454 Shift 0, v:4 sv:4 min:2 cur:2
62455 Out of Range Block::1 bo:426 Volume offset:938 Expected:2 Got:4
62456 Shift 0, v:4 sv:4 min:2 cur:2
62457 Out of Range Block::1 bo:427 Volume offset:939 Expected:2 Got:4
62458 Shift 0, v:4 sv:4 min:2 cur:2
62459 Out of Range Block::1 bo:428 Volume offset:940 Expected:2 Got:4
62460 Shift 0, v:4 sv:4 min:2 cur:2
62461 Out of Range Block::1 bo:429 Volume offset:941 Expected:2 Got:4
62462 Shift 0, v:4 sv:4 min:2 cur:2
62463 Out of Range Block::1 bo:430 Volume offset:942 Expected:2 Got:4
62464 Shift 0, v:4 sv:4 min:2 cur:2
62465 Out of Range Block::1 bo:431 Volume offset:943 Expected:2 Got:4
62466 Shift 0, v:4 sv:4 min:2 cur:2
62467 Out of Range Block::1 bo:432 Volume offset:944 Expected:2 Got:4
62468 Shift 0, v:4 sv:4 min:2 cur:2
62469 Out of Range Block::1 bo:433 Volume offset:945 Expected:2 Got:4
62470 Shift 0, v:4 sv:4 min:2 cur:2
62471 Out of Range Block::1 bo:434 Volume offset:946 Expected:2 Got:4
62472 Shift 0, v:4 sv:4 min:2 cur:2
62473 Out of Range Block::1 bo:435 Volume offset:947 Expected:2 Got:4
62474 Shift 0, v:4 sv:4 min:2 cur:2
62475 Out of Range Block::1 bo:436 Volume offset:948 Expected:2 Got:4
62476 Shift 0, v:4 sv:4 min:2 cur:2
62477 Out of Range Block::1 bo:437 Volume offset:949 Expected:2 Got:4
62478 Shift 0, v:4 sv:4 min:2 cur:2
62479 Out of Range Block::1 bo:438 Volume offset:950 Expected:2 Got:4
62480 Shift 0, v:4 sv:4 min:2 cur:2
62481 Out of Range Block::1 bo:439 Volume offset:951 Expected:2 Got:4
62482 Shift 0, v:4 sv:4 min:2 cur:2
62483 Out of Range Block::1 bo:440 Volume offset:952 Expected:2 Got:4
62484 Shift 0, v:4 sv:4 min:2 cur:2
62485 Out of Range Block::1 bo:441 Volume offset:953 Expected:2 Got:4
62486 Shift 0, v:4 sv:4 min:2 cur:2
62487 Out of Range Block::1 bo:442 Volume offset:954 Expected:2 Got:4
62488 Shift 0, v:4 sv:4 min:2 cur:2
62489 Out of Range Block::1 bo:443 Volume offset:955 Expected:2 Got:4
62490 Shift 0, v:4 sv:4 min:2 cur:2
62491 Out of Range Block::1 bo:444 Volume offset:956 Expected:2 Got:4
62492 Shift 0, v:4 sv:4 min:2 cur:2
62493 Out of Range Block::1 bo:445 Volume offset:957 Expected:2 Got:4
62494 Shift 0, v:4 sv:4 min:2 cur:2
62495 Out of Range Block::1 bo:446 Volume offset:958 Expected:2 Got:4
62496 Shift 0, v:4 sv:4 min:2 cur:2
62497 Out of Range Block::1 bo:447 Volume offset:959 Expected:2 Got:4
62498 Shift 0, v:4 sv:4 min:2 cur:2
62499 Out of Range Block::1 bo:448 Volume offset:960 Expected:2 Got:4
62500 Shift 0, v:4 sv:4 min:2 cur:2
62501 Out of Range Block::1 bo:449 Volume offset:961 Expected:2 Got:4
62502 Shift 0, v:4 sv:4 min:2 cur:2
62503 Out of Range Block::1 bo:450 Volume offset:962 Expected:2 Got:4
62504 Shift 0, v:4 sv:4 min:2 cur:2
62505 Out of Range Block::1 bo:451 Volume offset:963 Expected:2 Got:4
62506 Shift 0, v:4 sv:4 min:2 cur:2
62507 Out of Range Block::1 bo:452 Volume offset:964 Expected:2 Got:4
62508 Shift 0, v:4 sv:4 min:2 cur:2
62509 Out of Range Block::1 bo:453 Volume offset:965 Expected:2 Got:4
62510 Shift 0, v:4 sv:4 min:2 cur:2
62511 Out of Range Block::1 bo:454 Volume offset:966 Expected:2 Got:4
62512 Shift 0, v:4 sv:4 min:2 cur:2
62513 Out of Range Block::1 bo:455 Volume offset:967 Expected:2 Got:4
62514 Shift 0, v:4 sv:4 min:2 cur:2
62515 Out of Range Block::1 bo:456 Volume offset:968 Expected:2 Got:4
62516 Shift 0, v:4 sv:4 min:2 cur:2
62517 Out of Range Block::1 bo:457 Volume offset:969 Expected:2 Got:4
62518 Shift 0, v:4 sv:4 min:2 cur:2
62519 Out of Range Block::1 bo:458 Volume offset:970 Expected:2 Got:4
62520 Shift 0, v:4 sv:4 min:2 cur:2
62521 Out of Range Block::1 bo:459 Volume offset:971 Expected:2 Got:4
62522 Shift 0, v:4 sv:4 min:2 cur:2
62523 Out of Range Block::1 bo:460 Volume offset:972 Expected:2 Got:4
62524 Shift 0, v:4 sv:4 min:2 cur:2
62525 Out of Range Block::1 bo:461 Volume offset:973 Expected:2 Got:4
62526 Shift 0, v:4 sv:4 min:2 cur:2
62527 Out of Range Block::1 bo:462 Volume offset:974 Expected:2 Got:4
62528 Shift 0, v:4 sv:4 min:2 cur:2
62529 Out of Range Block::1 bo:463 Volume offset:975 Expected:2 Got:4
62530 Shift 0, v:4 sv:4 min:2 cur:2
62531 Out of Range Block::1 bo:464 Volume offset:976 Expected:2 Got:4
62532 Shift 0, v:4 sv:4 min:2 cur:2
62533 Out of Range Block::1 bo:465 Volume offset:977 Expected:2 Got:4
62534 Shift 0, v:4 sv:4 min:2 cur:2
62535 Out of Range Block::1 bo:466 Volume offset:978 Expected:2 Got:4
62536 Shift 0, v:4 sv:4 min:2 cur:2
62537 Out of Range Block::1 bo:467 Volume offset:979 Expected:2 Got:4
62538 Shift 0, v:4 sv:4 min:2 cur:2
62539 Out of Range Block::1 bo:468 Volume offset:980 Expected:2 Got:4
62540 Shift 0, v:4 sv:4 min:2 cur:2
62541 Out of Range Block::1 bo:469 Volume offset:981 Expected:2 Got:4
62542 Shift 0, v:4 sv:4 min:2 cur:2
62543 Out of Range Block::1 bo:470 Volume offset:982 Expected:2 Got:4
62544 Shift 0, v:4 sv:4 min:2 cur:2
62545 Out of Range Block::1 bo:471 Volume offset:983 Expected:2 Got:4
62546 Shift 0, v:4 sv:4 min:2 cur:2
62547 Out of Range Block::1 bo:472 Volume offset:984 Expected:2 Got:4
62548 Shift 0, v:4 sv:4 min:2 cur:2
62549 Out of Range Block::1 bo:473 Volume offset:985 Expected:2 Got:4
62550 Shift 0, v:4 sv:4 min:2 cur:2
62551 Out of Range Block::1 bo:474 Volume offset:986 Expected:2 Got:4
62552 Shift 0, v:4 sv:4 min:2 cur:2
62553 Out of Range Block::1 bo:475 Volume offset:987 Expected:2 Got:4
62554 Shift 0, v:4 sv:4 min:2 cur:2
62555 Out of Range Block::1 bo:476 Volume offset:988 Expected:2 Got:4
62556 Shift 0, v:4 sv:4 min:2 cur:2
62557 Out of Range Block::1 bo:477 Volume offset:989 Expected:2 Got:4
62558 Shift 0, v:4 sv:4 min:2 cur:2
62559 Out of Range Block::1 bo:478 Volume offset:990 Expected:2 Got:4
62560 Shift 0, v:4 sv:4 min:2 cur:2
62561 Out of Range Block::1 bo:479 Volume offset:991 Expected:2 Got:4
62562 Shift 0, v:4 sv:4 min:2 cur:2
62563 Out of Range Block::1 bo:480 Volume offset:992 Expected:2 Got:4
62564 Shift 0, v:4 sv:4 min:2 cur:2
62565 Out of Range Block::1 bo:481 Volume offset:993 Expected:2 Got:4
62566 Shift 0, v:4 sv:4 min:2 cur:2
62567 Out of Range Block::1 bo:482 Volume offset:994 Expected:2 Got:4
62568 Shift 0, v:4 sv:4 min:2 cur:2
62569 Out of Range Block::1 bo:483 Volume offset:995 Expected:2 Got:4
62570 Shift 0, v:4 sv:4 min:2 cur:2
62571 Out of Range Block::1 bo:484 Volume offset:996 Expected:2 Got:4
62572 Shift 0, v:4 sv:4 min:2 cur:2
62573 Out of Range Block::1 bo:485 Volume offset:997 Expected:2 Got:4
62574 Shift 0, v:4 sv:4 min:2 cur:2
62575 Out of Range Block::1 bo:486 Volume offset:998 Expected:2 Got:4
62576 Shift 0, v:4 sv:4 min:2 cur:2
62577 Out of Range Block::1 bo:487 Volume offset:999 Expected:2 Got:4
62578 Shift 0, v:4 sv:4 min:2 cur:2
62579 Out of Range Block::1 bo:488 Volume offset:1000 Expected:2 Got:4
62580 Shift 0, v:4 sv:4 min:2 cur:2
62581 Out of Range Block::1 bo:489 Volume offset:1001 Expected:2 Got:4
62582 Shift 0, v:4 sv:4 min:2 cur:2
62583 Out of Range Block::1 bo:490 Volume offset:1002 Expected:2 Got:4
62584 Shift 0, v:4 sv:4 min:2 cur:2
62585 Out of Range Block::1 bo:491 Volume offset:1003 Expected:2 Got:4
62586 Shift 0, v:4 sv:4 min:2 cur:2
62587 Out of Range Block::1 bo:492 Volume offset:1004 Expected:2 Got:4
62588 Shift 0, v:4 sv:4 min:2 cur:2
62589 Out of Range Block::1 bo:493 Volume offset:1005 Expected:2 Got:4
62590 Shift 0, v:4 sv:4 min:2 cur:2
62591 Out of Range Block::1 bo:494 Volume offset:1006 Expected:2 Got:4
62592 Shift 0, v:4 sv:4 min:2 cur:2
62593 Out of Range Block::1 bo:495 Volume offset:1007 Expected:2 Got:4
62594 Shift 0, v:4 sv:4 min:2 cur:2
62595 Out of Range Block::1 bo:496 Volume offset:1008 Expected:2 Got:4
62596 Shift 0, v:4 sv:4 min:2 cur:2
62597 Out of Range Block::1 bo:497 Volume offset:1009 Expected:2 Got:4
62598 Shift 0, v:4 sv:4 min:2 cur:2
62599 Out of Range Block::1 bo:498 Volume offset:1010 Expected:2 Got:4
62600 Shift 0, v:4 sv:4 min:2 cur:2
62601 Out of Range Block::1 bo:499 Volume offset:1011 Expected:2 Got:4
62602 Shift 0, v:4 sv:4 min:2 cur:2
62603 Out of Range Block::1 bo:500 Volume offset:1012 Expected:2 Got:4
62604 Shift 0, v:4 sv:4 min:2 cur:2
62605 Out of Range Block::1 bo:501 Volume offset:1013 Expected:2 Got:4
62606 Shift 0, v:4 sv:4 min:2 cur:2
62607 Out of Range Block::1 bo:502 Volume offset:1014 Expected:2 Got:4
62608 Shift 0, v:4 sv:4 min:2 cur:2
62609 Out of Range Block::1 bo:503 Volume offset:1015 Expected:2 Got:4
62610 Shift 0, v:4 sv:4 min:2 cur:2
62611 Out of Range Block::1 bo:504 Volume offset:1016 Expected:2 Got:4
62612 Shift 0, v:4 sv:4 min:2 cur:2
62613 Out of Range Block::1 bo:505 Volume offset:1017 Expected:2 Got:4
62614 Shift 0, v:4 sv:4 min:2 cur:2
62615 Out of Range Block::1 bo:506 Volume offset:1018 Expected:2 Got:4
62616 Shift 0, v:4 sv:4 min:2 cur:2
62617 Out of Range Block::1 bo:507 Volume offset:1019 Expected:2 Got:4
62618 Shift 0, v:4 sv:4 min:2 cur:2
62619 Out of Range Block::1 bo:508 Volume offset:1020 Expected:2 Got:4
62620 Shift 0, v:4 sv:4 min:2 cur:2
62621 Out of Range Block::1 bo:509 Volume offset:1021 Expected:2 Got:4
62622 Shift 0, v:4 sv:4 min:2 cur:2
62623 Out of Range Block::1 bo:510 Volume offset:1022 Expected:2 Got:4
62624 Shift 0, v:4 sv:4 min:2 cur:2
62625 Out of Range Block::1 bo:511 Volume offset:1023 Expected:2 Got:4
62626 test test::test_wl_commit_range_vv ... ok
62627 
62628 test result: ok. 64 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
62629 
62630 Running `/work/oxidecomputer/crucible/target/debug/deps/dsc-2b80980cbc3bac2c --nocapture`
62631 
62632 running 16 tests
62633 Creating region directory at: /tmp/.tmpFyEcKy
62634 Creating region directory at: /tmp/.tmpneCf4N
62635 Creating region directory at: /tmp/.tmpQfbQmF
62636 Creating dsc directory at: /tmp/.tmpa1Clwx
62637 Creating region directory at: /tmp/.tmpb9iNSL
62638 Creating region directory at: /tmp/.tmphjd71E
62639 Creating region directory at: /tmp/.tmpU83AJS
62640 Creating dsc directory at: /tmp/.tmpWQECQf
62641 Creating region directory at: /tmp/.tmp8fIrtP
62642 Creating region directory at: /tmp/.tmptkedSS
62643 Creating region directory at: /tmp/.tmpTiSUvs
62644 Creating region directory at: /tmp/.tmp9bCgVj
62645 Creating region directory at: /tmp/.tmpEJudHw
62646 Creating dsc directory at: /tmp/.tmpBz9a5a
62647 res is Err(No such file or directory (os error 2))
62648 test test::bad_bin ... ok
62649 test test::new_ti_four ... ok
62650 test test::new_ti ... ok
62651 test test::existing_ti ... ok
62652 test test::delete_bad_second_region ... ok
62653 test test::delete_region ... ok
62654 test test::delete_bad_region ... ok
62655 Creating region directory at: /tmp/.tmpXXKa1h
62656 test test::port_to_region_generation ... ok
62657 Creating region directory at: /tmp/.tmpSUuWSI
62658 Creating region directory at: /tmp/.tmp8hG3wS
62659 Creating dsc directory at: /tmp/.tmp4DPP60
62660 test test::new_ti_two_dirs ... ok
62661 Creating region directory at: /tmp/.tmpRlI2MK
62662 test test::new_ti_three ... ok
62663 Creating region directory at: /tmp/.tmpUr9WQJ
62664 Creating region directory at: /tmp/.tmpwqOD5v
62665 Creating region directory at: /tmp/.tmpILYztV
62666 test test::new_ti_two_region_count ... ok
62667 test test::restart_four_region ... ok
62668 test test::restart_three_region ... ok
62669 test test::restart_region_bad ... ok
62670 test test::restart_region_four_bad ... ok
62671 test control::test::test_dsc_openapi ... ok
62672 
62673 test result: ok. 16 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.01s
62674 
62675 Running `/work/oxidecomputer/crucible/target/debug/deps/dsc_client-f8a5b497695371e1 --nocapture`
62676 
62677 running 0 tests
62678 
62679 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62680 
62681 Running `/work/oxidecomputer/crucible/target/debug/deps/measure_iops-cbdca99bf515defe --nocapture`
62682 
62683 running 0 tests
62684 
62685 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62686 
62687 Running `/work/oxidecomputer/crucible/target/debug/deps/repair_client-5353c8de97b4615f --nocapture`
62688 
62689 running 0 tests
62690 
62691 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62692 
62693 Doc-tests crucible
62694 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible --test /work/oxidecomputer/crucible/upstairs/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern aes_gcm_siv=/work/oxidecomputer/crucible/target/debug/deps/libaes_gcm_siv-21495b616a07c9a4.rlib --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern async_recursion=/work/oxidecomputer/crucible/target/debug/deps/libasync_recursion-ce9499495a1cb858.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern 
futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-b06e69badd72e55c.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern proptest=/work/oxidecomputer/crucible/target/debug/deps/libproptest-327f7f2cf6858f27.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern 
slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern test_strategy=/work/oxidecomputer/crucible/target/debug/deps/libtest_strategy-5eb6b90d55d9f739.so --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_test=/work/oxidecomputer/crucible/target/debug/deps/libtokio_test-12a28be646ff63e6.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62695 
62696 running 0 tests
62697 
62698 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62699 
62700 Doc-tests crucible-agent-client
62701 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_agent_client --test /work/oxidecomputer/crucible/agent-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible_agent_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_agent_client-86e1c18945d61be3.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
62702 
62703 running 0 tests
62704 
62705 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62706 
62707 Doc-tests crucible-client-types
62708 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_client_types --test /work/oxidecomputer/crucible/crucible-client-types/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62709 
62710 running 0 tests
62711 
62712 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62713 
62714 Doc-tests crucible-common
62715 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_common --test /work/oxidecomputer/crucible/common/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern 
tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern twox_hash=/work/oxidecomputer/crucible/target/debug/deps/libtwox_hash-9f5dd4f7319ca539.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62716 
62717 running 0 tests
62718 
62719 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62720 
62721 Doc-tests crucible-control-client
62722 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_control_client --test /work/oxidecomputer/crucible/control-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern crucible_control_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_control_client-3d0142c7d3790e17.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
62723 
62724 running 0 tests
62725 
62726 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62727 
62728 Doc-tests crucible-downstairs
62729 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_downstairs --test /work/oxidecomputer/crucible/downstairs/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern 
http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern 
schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --cfg 'feature="default"' --error-format human`
62730 
62731 running 0 tests
62732 
62733 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62734 
62735 Doc-tests crucible-integration-tests
62736 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_integration_tests --test /work/oxidecomputer/crucible/integration_tests/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_integration_tests=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_integration_tests-521d4724b4b30c4a.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_pantry_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry_client-ccb9ddeebb23cea2.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern httptest=/work/oxidecomputer/crucible/target/debug/deps/libhttptest-174da737d96e2af6.rlib --extern 
rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62737 
62738 running 0 tests
62739 
62740 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62741 
62742 Doc-tests crucible-pantry
62743 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_pantry --test /work/oxidecomputer/crucible/pantry/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern 
omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62744 
62745 running 0 tests
62746 
62747 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62748 
62749 Doc-tests crucible-pantry-client
62750 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_pantry_client --test /work/oxidecomputer/crucible/pantry-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible_pantry_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry_client-ccb9ddeebb23cea2.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62751 
62752 running 0 tests
62753 
62754 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62755 
62756 Doc-tests crucible-protocol
62757 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_protocol --test /work/oxidecomputer/crucible/protocol/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
62758 
62759 running 0 tests
62760 
62761 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62762 
62763 Doc-tests crucible-smf
62764 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_smf --test /work/oxidecomputer/crucible/smf/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib -C embed-bitcode=no --error-format human`
62765 
62766 running 0 tests
62767 
62768 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62769 
62770 Doc-tests dsc-client
62771 Running `rustdoc --edition=2021 --crate-type lib --crate-name dsc_client --test /work/oxidecomputer/crucible/dsc-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern dsc_client=/work/oxidecomputer/crucible/target/debug/deps/libdsc_client-15b0c81fa833cf0f.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
62772 
62773 running 0 tests
62774 
62775 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62776 
62777 Doc-tests repair-client
62778 Running `rustdoc --edition=2021 --crate-type lib --crate-name repair_client --test /work/oxidecomputer/crucible/repair-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
62779 
62780 running 0 tests
62781 
62782 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
62783 
62784 
62785 real 16:26.588377952
62786 user 52:35.118836291
62787 sys 5:26.872311264
62788 trap 1.675553487
62789 tflt 0.373093867
62790 dflt 1.700465352
62791 kflt 0.000709715
62792 lock 1:53:33.351688251
62793 slp 2:02:19.411583747
62794 lat 1:25.000689190
62795 stop 50.092823530